diff --git a/examples/Pose2SLAMExample_g2o.cpp b/examples/Pose2SLAMExample_g2o.cpp index fb3869a50..4a77f7be1 100644 --- a/examples/Pose2SLAMExample_g2o.cpp +++ b/examples/Pose2SLAMExample_g2o.cpp @@ -48,16 +48,16 @@ int main(const int argc, const char *argv[]) { Values::shared_ptr initial; bool is3D = false; if (kernelType.compare("none") == 0) { - boost::tie(graph, initial) = readG2o(g2oFile, is3D); + std::tie(graph, initial) = readG2o(g2oFile, is3D); } if (kernelType.compare("huber") == 0) { std::cout << "Using robust kernel: huber " << std::endl; - boost::tie(graph, initial) = + std::tie(graph, initial) = readG2o(g2oFile, is3D, KernelFunctionTypeHUBER); } if (kernelType.compare("tukey") == 0) { std::cout << "Using robust kernel: tukey " << std::endl; - boost::tie(graph, initial) = + std::tie(graph, initial) = readG2o(g2oFile, is3D, KernelFunctionTypeTUKEY); } @@ -90,7 +90,7 @@ int main(const int argc, const char *argv[]) { std::cout << "Writing results to file: " << outputFile << std::endl; NonlinearFactorGraph::shared_ptr graphNoKernel; Values::shared_ptr initial2; - boost::tie(graphNoKernel, initial2) = readG2o(g2oFile); + std::tie(graphNoKernel, initial2) = readG2o(g2oFile); writeG2o(*graphNoKernel, result, outputFile); std::cout << "done! " << std::endl; } diff --git a/examples/Pose2SLAMExample_graph.cpp b/examples/Pose2SLAMExample_graph.cpp index c4301d28d..cf2513864 100644 --- a/examples/Pose2SLAMExample_graph.cpp +++ b/examples/Pose2SLAMExample_graph.cpp @@ -36,7 +36,7 @@ int main (int argc, char** argv) { Values::shared_ptr initial; SharedDiagonal model = noiseModel::Diagonal::Sigmas((Vector(3) << 0.05, 0.05, 5.0 * M_PI / 180.0).finished()); string graph_file = findExampleDataFile("w100.graph"); - boost::tie(graph, initial) = load2D(graph_file, model); + std::tie(graph, initial) = load2D(graph_file, model); initial->print("Initial estimate:\n"); // Add a Gaussian prior on first poses diff --git a/examples/Pose2SLAMExample_lago.cpp b/examples/Pose2SLAMExample_lago.cpp index 1d02c64fa..c470e3904 100644 --- a/examples/Pose2SLAMExample_lago.cpp +++ b/examples/Pose2SLAMExample_lago.cpp @@ -37,7 +37,7 @@ int main(const int argc, const char *argv[]) { NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; - boost::tie(graph, initial) = readG2o(g2oFile); + std::tie(graph, initial) = readG2o(g2oFile); // Add prior on the pose having index (key) = 0 auto priorModel = noiseModel::Diagonal::Variances(Vector3(1e-6, 1e-6, 1e-8)); @@ -55,7 +55,7 @@ int main(const int argc, const char *argv[]) { std::cout << "Writing results to file: " << outputFile << std::endl; NonlinearFactorGraph::shared_ptr graphNoKernel; Values::shared_ptr initial2; - boost::tie(graphNoKernel, initial2) = readG2o(g2oFile); + std::tie(graphNoKernel, initial2) = readG2o(g2oFile); writeG2o(*graphNoKernel, estimateLago, outputFile); std::cout << "done! 
" << std::endl; } diff --git a/examples/Pose3Localization.cpp b/examples/Pose3Localization.cpp index 44076ab38..4fb200faf 100644 --- a/examples/Pose3Localization.cpp +++ b/examples/Pose3Localization.cpp @@ -37,7 +37,7 @@ int main(const int argc, const char* argv[]) { NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; bool is3D = true; - boost::tie(graph, initial) = readG2o(g2oFile, is3D); + std::tie(graph, initial) = readG2o(g2oFile, is3D); // Add prior on the first key auto priorModel = noiseModel::Diagonal::Variances( @@ -67,7 +67,7 @@ int main(const int argc, const char* argv[]) { std::cout << "Writing results to file: " << outputFile << std::endl; NonlinearFactorGraph::shared_ptr graphNoKernel; Values::shared_ptr initial2; - boost::tie(graphNoKernel, initial2) = readG2o(g2oFile); + std::tie(graphNoKernel, initial2) = readG2o(g2oFile); writeG2o(*graphNoKernel, result, outputFile); std::cout << "done! " << std::endl; } diff --git a/examples/Pose3SLAMExample_changeKeys.cpp b/examples/Pose3SLAMExample_changeKeys.cpp index 392b1c417..ca9448ea3 100644 --- a/examples/Pose3SLAMExample_changeKeys.cpp +++ b/examples/Pose3SLAMExample_changeKeys.cpp @@ -36,7 +36,7 @@ int main(const int argc, const char *argv[]) { NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; bool is3D = true; - boost::tie(graph, initial) = readG2o(g2oFile, is3D); + std::tie(graph, initial) = readG2o(g2oFile, is3D); bool add = false; Key firstKey = 8646911284551352320; diff --git a/examples/Pose3SLAMExample_g2o.cpp b/examples/Pose3SLAMExample_g2o.cpp index 7ae2978ce..0c99a75a0 100644 --- a/examples/Pose3SLAMExample_g2o.cpp +++ b/examples/Pose3SLAMExample_g2o.cpp @@ -36,7 +36,7 @@ int main(const int argc, const char* argv[]) { NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; bool is3D = true; - boost::tie(graph, initial) = readG2o(g2oFile, is3D); + std::tie(graph, initial) = readG2o(g2oFile, is3D); // Add prior on the first key auto priorModel = noiseModel::Diagonal::Variances( @@ -66,7 +66,7 @@ int main(const int argc, const char* argv[]) { std::cout << "Writing results to file: " << outputFile << std::endl; NonlinearFactorGraph::shared_ptr graphNoKernel; Values::shared_ptr initial2; - boost::tie(graphNoKernel, initial2) = readG2o(g2oFile); + std::tie(graphNoKernel, initial2) = readG2o(g2oFile); writeG2o(*graphNoKernel, result, outputFile); std::cout << "done! " << std::endl; } diff --git a/examples/Pose3SLAMExample_initializePose3Chordal.cpp b/examples/Pose3SLAMExample_initializePose3Chordal.cpp index 03db9fc77..c1cc97c85 100644 --- a/examples/Pose3SLAMExample_initializePose3Chordal.cpp +++ b/examples/Pose3SLAMExample_initializePose3Chordal.cpp @@ -36,7 +36,7 @@ int main(const int argc, const char* argv[]) { NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; bool is3D = true; - boost::tie(graph, initial) = readG2o(g2oFile, is3D); + std::tie(graph, initial) = readG2o(g2oFile, is3D); // Add prior on the first key auto priorModel = noiseModel::Diagonal::Variances( @@ -60,7 +60,7 @@ int main(const int argc, const char* argv[]) { std::cout << "Writing results to file: " << outputFile << std::endl; NonlinearFactorGraph::shared_ptr graphNoKernel; Values::shared_ptr initial2; - boost::tie(graphNoKernel, initial2) = readG2o(g2oFile); + std::tie(graphNoKernel, initial2) = readG2o(g2oFile); writeG2o(*graphNoKernel, initialization, outputFile); std::cout << "done! 
" << std::endl; } diff --git a/examples/Pose3SLAMExample_initializePose3Gradient.cpp b/examples/Pose3SLAMExample_initializePose3Gradient.cpp index 31693c5ff..740391017 100644 --- a/examples/Pose3SLAMExample_initializePose3Gradient.cpp +++ b/examples/Pose3SLAMExample_initializePose3Gradient.cpp @@ -36,7 +36,7 @@ int main(const int argc, const char* argv[]) { NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; bool is3D = true; - boost::tie(graph, initial) = readG2o(g2oFile, is3D); + std::tie(graph, initial) = readG2o(g2oFile, is3D); // Add prior on the first key auto priorModel = noiseModel::Diagonal::Variances( @@ -66,7 +66,7 @@ int main(const int argc, const char* argv[]) { std::cout << "Writing results to file: " << outputFile << std::endl; NonlinearFactorGraph::shared_ptr graphNoKernel; Values::shared_ptr initial2; - boost::tie(graphNoKernel, initial2) = readG2o(g2oFile); + std::tie(graphNoKernel, initial2) = readG2o(g2oFile); writeG2o(*graphNoKernel, initialization, outputFile); std::cout << "done! " << std::endl; } diff --git a/examples/RangeISAMExample_plaza2.cpp b/examples/RangeISAMExample_plaza2.cpp index aa61ffc6c..39cc6c4ef 100644 --- a/examples/RangeISAMExample_plaza2.cpp +++ b/examples/RangeISAMExample_plaza2.cpp @@ -92,7 +92,7 @@ std::list readOdometry() { // load the ranges from TD // Time (sec) Sender / Antenna ID Receiver Node ID Range (m) -using RangeTriple = boost::tuple; +using RangeTriple = std::tuple; std::vector readTriples() { std::vector triples; std::string data_file = gtsam::findExampleDataFile("Plaza2_TD.txt"); @@ -166,7 +166,7 @@ int main(int argc, char** argv) { //--------------------------------- odometry loop -------------------------- double t; Pose2 odometry; - boost::tie(t, odometry) = timedOdometry; + std::tie(t, odometry) = timedOdometry; // add odometry factor newFactors.emplace_shared>(i - 1, i, odometry, @@ -178,10 +178,10 @@ int main(int argc, char** argv) { initial.insert(i, predictedPose); // Check if there are range factors to be added - while (k < K && t >= boost::get<0>(triples[k])) { - size_t j = boost::get<1>(triples[k]); + while (k < K && t >= std::get<0>(triples[k])) { + size_t j = std::get<1>(triples[k]); Symbol landmark_key('L', j); - double range = boost::get<2>(triples[k]); + double range = std::get<2>(triples[k]); newFactors.emplace_shared>( i, landmark_key, range, rangeNoise); if (initializedLandmarks.count(landmark_key) == 0) { diff --git a/examples/ShonanAveragingCLI.cpp b/examples/ShonanAveragingCLI.cpp index c72a32017..831b608e3 100644 --- a/examples/ShonanAveragingCLI.cpp +++ b/examples/ShonanAveragingCLI.cpp @@ -103,7 +103,7 @@ int main(int argc, char* argv[]) { auto result = shonan.run(initial, pMin); // Parse file again to set up translation problem, adding a prior - boost::tie(inputGraph, posesInFile) = load2D(inputFile); + std::tie(inputGraph, posesInFile) = load2D(inputFile); auto priorModel = noiseModel::Unit::Create(3); inputGraph->addPrior(0, posesInFile->at(0), priorModel); @@ -119,7 +119,7 @@ int main(int argc, char* argv[]) { auto result = shonan.run(initial, pMin); // Parse file again to set up translation problem, adding a prior - boost::tie(inputGraph, posesInFile) = load3D(inputFile); + std::tie(inputGraph, posesInFile) = load3D(inputFile); auto priorModel = noiseModel::Unit::Create(6); inputGraph->addPrior(0, posesInFile->at(0), priorModel); diff --git a/examples/SolverComparer.cpp b/examples/SolverComparer.cpp index 4cbaaf617..34b30c10a 100644 --- a/examples/SolverComparer.cpp +++ 
b/examples/SolverComparer.cpp @@ -49,8 +49,6 @@ #include #include #include -#include -#include #include #include diff --git a/gtsam/base/Matrix.cpp b/gtsam/base/Matrix.cpp index 5b8a021d4..e7ee7b905 100644 --- a/gtsam/base/Matrix.cpp +++ b/gtsam/base/Matrix.cpp @@ -23,7 +23,6 @@ #include #include -#include #include #include @@ -251,7 +250,7 @@ pair qr(const Matrix& A) { // calculate the Householder vector v double beta; Vector vjm; - boost::tie(beta,vjm) = house(xjm); + std::tie(beta,vjm) = house(xjm); // pad with zeros to get m-dimensional vector v for(size_t k = 0 ; k < m; k++) @@ -269,13 +268,13 @@ pair qr(const Matrix& A) { } /* ************************************************************************* */ -list > +list > weighted_eliminate(Matrix& A, Vector& b, const Vector& sigmas) { size_t m = A.rows(), n = A.cols(); // get size(A) size_t maxRank = min(m,n); // create list - list > results; + list > results; Vector pseudo(m); // allocate storage for pseudo-inverse Vector weights = sigmas.array().square().inverse(); // calculate weights once @@ -304,7 +303,7 @@ weighted_eliminate(Matrix& A, Vector& b, const Vector& sigmas) { // construct solution (r, d, sigma) // TODO: avoid sqrt, store precision or at least variance - results.push_back(boost::make_tuple(r, d, 1./sqrt(precision))); + results.push_back(std::make_tuple(r, d, 1./sqrt(precision))); // exit after rank exhausted if (results.size()>=maxRank) break; @@ -565,7 +564,7 @@ void svd(const Matrix& A, Matrix& U, Vector& S, Matrix& V) { } /* ************************************************************************* */ -boost::tuple DLT(const Matrix& A, double rank_tol) { +std::tuple DLT(const Matrix& A, double rank_tol) { // Check size of A size_t n = A.rows(), p = A.cols(), m = min(n,p); @@ -582,7 +581,7 @@ boost::tuple DLT(const Matrix& A, double rank_tol) { // Return rank, error, and corresponding column of V double error = m
< p ? 0 : s(m-1); - return boost::tuple<int, double, Vector>
((int)rank, error, Vector(column(V, p-1))); + return std::tuple((int)rank, error, Vector(column(V, p-1))); } /* ************************************************************************* */ diff --git a/gtsam/base/Matrix.h b/gtsam/base/Matrix.h index 88714d6f6..60fed671a 100644 --- a/gtsam/base/Matrix.h +++ b/gtsam/base/Matrix.h @@ -26,7 +26,6 @@ #include #include -#include #include @@ -307,7 +306,7 @@ GTSAM_EXPORT void inplace_QR(Matrix& A); * @param sigmas is a vector of the measurement standard deviation * @return list of r vectors, d and sigma */ -GTSAM_EXPORT std::list > +GTSAM_EXPORT std::list > weighted_eliminate(Matrix& A, Vector& b, const Vector& sigmas); /** @@ -434,7 +433,7 @@ GTSAM_EXPORT void svd(const Matrix& A, Matrix& U, Vector& S, Matrix& V); * Returns rank of A, minimum error (singular value), * and corresponding eigenvector (column of V, with A=U*S*V') */ -GTSAM_EXPORT boost::tuple +GTSAM_EXPORT std::tuple DLT(const Matrix& A, double rank_tol = 1e-9); /** diff --git a/gtsam/base/tests/testMatrix.cpp b/gtsam/base/tests/testMatrix.cpp index fb3bd948c..1b3ef700c 100644 --- a/gtsam/base/tests/testMatrix.cpp +++ b/gtsam/base/tests/testMatrix.cpp @@ -20,7 +20,6 @@ #include #include #include -#include #include #include #include @@ -859,7 +858,7 @@ TEST(Matrix, qr ) 7.4536, 0, 00, 0, 0, 10.9545, 00, 0, 0, 0, 00, 0, 0, 0).finished(); Matrix Q, R; - boost::tie(Q, R) = qr(A); + std::tie(Q, R) = qr(A); EXPECT(assert_equal(expectedQ, Q, 1e-4)); EXPECT(assert_equal(expectedR, R, 1e-4)); EXPECT(assert_equal(A, Q*R, 1e-14)); @@ -911,7 +910,7 @@ TEST(Matrix, weighted_elimination ) // perform elimination Matrix A1 = A; Vector b1 = b; - std::list > solution = + std::list > solution = weighted_eliminate(A1, b1, sigmas); // unpack and verify @@ -919,7 +918,7 @@ TEST(Matrix, weighted_elimination ) for (const auto& tuple : solution) { Vector r; double di, sigma; - boost::tie(r, di, sigma) = tuple; + std::tie(r, di, sigma) = tuple; EXPECT(assert_equal(r, expectedR.row(i))); // verify r DOUBLES_EQUAL(d(i), di, 1e-8); // verify d DOUBLES_EQUAL(newSigmas(i), sigma, 1e-5); // verify sigma @@ -1146,7 +1145,7 @@ TEST(Matrix, DLT ) int rank; double error; Vector actual; - boost::tie(rank,error,actual) = DLT(A); + std::tie(rank,error,actual) = DLT(A); Vector expected = (Vector(9) << -0.0, 0.2357, 0.4714, -0.2357, 0.0, - 0.4714,-0.4714, 0.4714, 0.0).finished(); EXPECT_LONGS_EQUAL(8,rank); EXPECT_DOUBLES_EQUAL(0,error,1e-8); diff --git a/gtsam/base/tests/testVector.cpp b/gtsam/base/tests/testVector.cpp index c87732b09..24c40fb3e 100644 --- a/gtsam/base/tests/testVector.cpp +++ b/gtsam/base/tests/testVector.cpp @@ -19,7 +19,6 @@ #include #include #include -#include #include using namespace std; @@ -156,7 +155,7 @@ TEST(Vector, weightedPseudoinverse ) // perform solve Vector actual; double precision; - boost::tie(actual, precision) = weightedPseudoinverse(x, weights); + std::tie(actual, precision) = weightedPseudoinverse(x, weights); // construct expected Vector expected(2); @@ -181,7 +180,7 @@ TEST(Vector, weightedPseudoinverse_constraint ) Vector weights = sigmas.array().square().inverse(); // perform solve Vector actual; double precision; - boost::tie(actual, precision) = weightedPseudoinverse(x, weights); + std::tie(actual, precision) = weightedPseudoinverse(x, weights); // construct expected Vector expected(2); @@ -199,7 +198,7 @@ TEST(Vector, weightedPseudoinverse_nan ) Vector sigmas = (Vector(4) << 0.1, 0.1, 0., 0.).finished(); Vector weights = sigmas.array().square().inverse(); Vector pseudo; double 
precision; - boost::tie(pseudo, precision) = weightedPseudoinverse(a, weights); + std::tie(pseudo, precision) = weightedPseudoinverse(a, weights); Vector expected = (Vector(4) << 1., 0., 0.,0.).finished(); EXPECT(assert_equal(expected, pseudo)); diff --git a/gtsam/discrete/DecisionTreeFactor.h b/gtsam/discrete/DecisionTreeFactor.h index 74899b729..e59552007 100644 --- a/gtsam/discrete/DecisionTreeFactor.h +++ b/gtsam/discrete/DecisionTreeFactor.h @@ -268,5 +268,4 @@ namespace gtsam { // traits template <> struct traits : public Testable {}; - } // namespace gtsam diff --git a/gtsam/discrete/DiscreteBayesNet.cpp b/gtsam/discrete/DiscreteBayesNet.cpp index 451f5f07b..f754250ed 100644 --- a/gtsam/discrete/DiscreteBayesNet.cpp +++ b/gtsam/discrete/DiscreteBayesNet.cpp @@ -20,8 +20,6 @@ #include #include -#include - namespace gtsam { // Instantiate base class @@ -58,8 +56,9 @@ DiscreteValues DiscreteBayesNet::sample() const { DiscreteValues DiscreteBayesNet::sample(DiscreteValues result) const { // sample each node in turn in topological sort order (parents first) - for (auto conditional : boost::adaptors::reverse(*this)) - conditional->sampleInPlace(&result); + for (auto it = std::make_reverse_iterator(end()); it != std::make_reverse_iterator(begin()); ++it) { + (*it)->sampleInPlace(&result); + } return result; } diff --git a/gtsam/discrete/DiscreteLookupDAG.cpp b/gtsam/discrete/DiscreteLookupDAG.cpp index bdc77031c..ab62055ed 100644 --- a/gtsam/discrete/DiscreteLookupDAG.cpp +++ b/gtsam/discrete/DiscreteLookupDAG.cpp @@ -20,6 +20,7 @@ #include #include +#include #include #include @@ -118,8 +119,10 @@ DiscreteLookupDAG DiscreteLookupDAG::FromBayesNet( DiscreteValues DiscreteLookupDAG::argmax(DiscreteValues result) const { // Argmax each node in turn in topological sort order (parents first). 
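The dominant change in this patch is swapping boost::tie for std::tie when unpacking pair- and tuple-valued returns (readG2o, load2D, qr, weighted_eliminate, weightedPseudoinverse, EliminateDiscrete, and friends). A minimal, self-contained sketch of the idiom; the factory function below is a hypothetical stand-in, not a GTSAM API:

#include <iostream>
#include <string>
#include <tuple>
#include <utility>

// Hypothetical stand-in for an API such as readG2o that returns two results.
std::pair<std::string, int> makeGraphAndCount() { return {"graph", 42}; }

int main() {
  // Patch style: unpack into pre-declared variables with std::tie.
  std::string graph;
  int count = 0;
  std::tie(graph, count) = makeGraphAndCount();

  // C++17 alternative when the variables are new: structured bindings.
  const auto [graph2, count2] = makeGraphAndCount();

  std::cout << graph << " " << count << " " << graph2 << " " << count2 << "\n";
  return 0;
}

std::tie is kept at the call sites above presumably because they assign into pre-declared variables of different scopes; structured bindings would only apply where fresh variables are introduced.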
- for (auto lookupTable : boost::adaptors::reverse(*this)) - lookupTable->argmaxInPlace(&result); + for (auto it = std::make_reverse_iterator(end()); it != std::make_reverse_iterator(begin()); ++it) { + // dereference to get the sharedFactor to the lookup table + (*it)->argmaxInPlace(&result); + } return result; } /* ************************************************************************** */ diff --git a/gtsam/discrete/DiscreteValues.cpp b/gtsam/discrete/DiscreteValues.cpp index b0427e91b..e21fb71db 100644 --- a/gtsam/discrete/DiscreteValues.cpp +++ b/gtsam/discrete/DiscreteValues.cpp @@ -17,7 +17,6 @@ #include -#include #include using std::cout; @@ -39,8 +38,10 @@ void DiscreteValues::print(const string& s, /* ************************************************************************ */ bool DiscreteValues::equals(const DiscreteValues& x, double tol) const { if (this->size() != x.size()) return false; - for (const auto values : boost::combine(*this, x)) { - if (values.get<0>() != values.get<1>()) return false; + auto it1 = x.begin(); + auto it2 = this->begin(); + for (; it1 != x.end(); ++it1, ++it2) { + if (it1->first != it2->first || it1->second != it2->second) return false; } return true; } diff --git a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp index 9439ff417..a4d19c96c 100644 --- a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp +++ b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp @@ -111,7 +111,7 @@ TEST(DiscreteFactorGraph, test) { frontalKeys += Key(0); DiscreteConditional::shared_ptr conditional; DecisionTreeFactor::shared_ptr newFactor; - boost::tie(conditional, newFactor) = EliminateDiscrete(graph, frontalKeys); + std::tie(conditional, newFactor) = EliminateDiscrete(graph, frontalKeys); // Check Conditional CHECK(conditional); @@ -130,7 +130,7 @@ TEST(DiscreteFactorGraph, test) { DiscreteEliminationTree etree(graph, ordering); DiscreteBayesNet::shared_ptr actual; DiscreteFactorGraph::shared_ptr remainingGraph; - boost::tie(actual, remainingGraph) = etree.eliminate(&EliminateDiscrete); + std::tie(actual, remainingGraph) = etree.eliminate(&EliminateDiscrete); // Check Bayes net DiscreteBayesNet expectedBayesNet; diff --git a/gtsam/geometry/Rot3.cpp b/gtsam/geometry/Rot3.cpp index e854d03b7..153b9690e 100644 --- a/gtsam/geometry/Rot3.cpp +++ b/gtsam/geometry/Rot3.cpp @@ -170,13 +170,13 @@ Vector3 Rot3::xyz(OptionalJacobian<3, 3> H) const { #endif Matrix39 qHm; - boost::tie(I, q) = RQ(m, qHm); + std::tie(I, q) = RQ(m, qHm); // TODO : Explore whether this expression can be optimized as both // qHm and mH are super-sparse *H = qHm * mH; } else - boost::tie(I, q) = RQ(matrix()); + std::tie(I, q) = RQ(matrix()); return q; } diff --git a/gtsam/geometry/tests/testRot3.cpp b/gtsam/geometry/tests/testRot3.cpp index 0e7967715..2f48948eb 100644 --- a/gtsam/geometry/tests/testRot3.cpp +++ b/gtsam/geometry/tests/testRot3.cpp @@ -510,7 +510,7 @@ TEST( Rot3, RQ) // Try RQ on a pure rotation Matrix actualK; Vector actual; - boost::tie(actualK, actual) = RQ(R.matrix()); + std::tie(actualK, actual) = RQ(R.matrix()); Vector expected = Vector3(0.14715, 0.385821, 0.231671); CHECK(assert_equal(I_3x3,actualK)); CHECK(assert_equal(expected,actual,1e-6)); @@ -531,7 +531,7 @@ TEST( Rot3, RQ) // Try RQ to recover calibration from 3*3 sub-block of projection matrix Matrix K = (Matrix(3, 3) << 500.0, 0.0, 320.0, 0.0, 500.0, 240.0, 0.0, 0.0, 1.0).finished(); Matrix A = K * R.matrix(); - boost::tie(actualK, actual) = RQ(A); + std::tie(actualK, actual) 
= RQ(A); CHECK(assert_equal(K,actualK)); CHECK(assert_equal(expected,actual,1e-6)); } diff --git a/gtsam/geometry/triangulation.cpp b/gtsam/geometry/triangulation.cpp index acb0ddf0e..245b8634a 100644 --- a/gtsam/geometry/triangulation.cpp +++ b/gtsam/geometry/triangulation.cpp @@ -51,7 +51,7 @@ Vector4 triangulateHomogeneousDLT( int rank; double error; Vector v; - boost::tie(rank, error, v) = DLT(A, rank_tol); + std::tie(rank, error, v) = DLT(A, rank_tol); if (rank < 3) throw(TriangulationUnderconstrainedException()); @@ -82,7 +82,7 @@ Vector4 triangulateHomogeneousDLT( int rank; double error; Vector v; - boost::tie(rank, error, v) = DLT(A, rank_tol); + std::tie(rank, error, v) = DLT(A, rank_tol); if (rank < 3) throw(TriangulationUnderconstrainedException()); diff --git a/gtsam/geometry/triangulation.h b/gtsam/geometry/triangulation.h index 24bc94d80..05c66f8df 100644 --- a/gtsam/geometry/triangulation.h +++ b/gtsam/geometry/triangulation.h @@ -195,7 +195,7 @@ Point3 triangulateNonlinear(const std::vector& poses, // Create a factor graph and initial values Values values; NonlinearFactorGraph graph; - boost::tie(graph, values) = triangulationGraph // + std::tie(graph, values) = triangulationGraph // (poses, sharedCal, measurements, Symbol('p', 0), initialEstimate, model); return optimize(graph, values, Symbol('p', 0)); @@ -217,7 +217,7 @@ Point3 triangulateNonlinear( // Create a factor graph and initial values Values values; NonlinearFactorGraph graph; - boost::tie(graph, values) = triangulationGraph // + std::tie(graph, values) = triangulationGraph // (cameras, measurements, Symbol('p', 0), initialEstimate, model); return optimize(graph, values, Symbol('p', 0)); diff --git a/gtsam/hybrid/HybridJunctionTree.cpp b/gtsam/hybrid/HybridJunctionTree.cpp index 6c0e62ac9..b7aefcc92 100644 --- a/gtsam/hybrid/HybridJunctionTree.cpp +++ b/gtsam/hybrid/HybridJunctionTree.cpp @@ -102,7 +102,7 @@ struct HybridConstructorTraversalData { keyAsOrdering.push_back(node->key); SymbolicConditional::shared_ptr conditional; SymbolicFactor::shared_ptr separatorFactor; - boost::tie(conditional, separatorFactor) = + std::tie(conditional, separatorFactor) = internal::EliminateSymbolic(symbolicFactors, keyAsOrdering); // Store symbolic elimination results in the parent @@ -129,9 +129,9 @@ struct HybridConstructorTraversalData { // Check if we should merge the i^th child if (nrParents + nrFrontals == childConditionals[i]->nrParents()) { const bool myType = - data.discreteKeys.exists(conditional->frontals()[0]); + data.discreteKeys.exists(conditional->frontals().front()); const bool theirType = - data.discreteKeys.exists(childConditionals[i]->frontals()[0]); + data.discreteKeys.exists(childConditionals[i]->frontals().front()); if (myType == theirType) { // Increment number of frontal variables diff --git a/gtsam/inference/BayesNet-inst.h b/gtsam/inference/BayesNet-inst.h index f06008c88..a6aa0a70a 100644 --- a/gtsam/inference/BayesNet-inst.h +++ b/gtsam/inference/BayesNet-inst.h @@ -21,7 +21,6 @@ #include #include -#include #include #include @@ -56,7 +55,9 @@ void BayesNet::dot(std::ostream& os, os << "\n"; // Reverse order as typically Bayes nets stored in reverse topological sort. 
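Several hunks rewrite `for (auto x : boost::adaptors::reverse(*this))` as an explicit reverse-iterator loop (DiscreteBayesNet::sample, DiscreteLookupDAG::argmax, and the BayesNet::dot loop continuing right after this). A sketch of the same traversal on a plain std::vector, with illustrative names only:

#include <iostream>
#include <iterator>
#include <vector>

int main() {
  const std::vector<int> conditionals{1, 2, 3, 4};

  // Reverse traversal without boost::adaptors::reverse, as in the patch:
  for (auto it = std::make_reverse_iterator(conditionals.end());
       it != std::make_reverse_iterator(conditionals.begin()); ++it) {
    std::cout << *it << " ";  // prints 4 3 2 1
  }
  std::cout << "\n";

  // rbegin()/rend() express the same loop more directly on containers that provide them:
  for (auto it = conditionals.rbegin(); it != conditionals.rend(); ++it) {
    std::cout << *it << " ";
  }
  std::cout << "\n";
  return 0;
}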
- for (auto conditional : boost::adaptors::reverse(*this)) { + for (auto it = std::make_reverse_iterator(this->end()); + it != std::make_reverse_iterator(this->begin()); ++it) { + const auto& conditional = *it; auto frontals = conditional->frontals(); const Key me = frontals.front(); auto parents = conditional->parents(); diff --git a/gtsam/inference/BayesTree-inst.h b/gtsam/inference/BayesTree-inst.h index 8e771708f..8d3712624 100644 --- a/gtsam/inference/BayesTree-inst.h +++ b/gtsam/inference/BayesTree-inst.h @@ -361,7 +361,7 @@ namespace gtsam { } // Factor into C1\B | B. sharedFactorGraph temp_remaining; - boost::tie(p_C1_B, temp_remaining) = + std::tie(p_C1_B, temp_remaining) = FactorGraphType(p_C1_Bred).eliminatePartialMultifrontal(Ordering(C1_minus_B), function); } std::shared_ptr p_C2_B; { @@ -373,7 +373,7 @@ namespace gtsam { } // Factor into C2\B | B. sharedFactorGraph temp_remaining; - boost::tie(p_C2_B, temp_remaining) = + std::tie(p_C2_B, temp_remaining) = FactorGraphType(p_C2_Bred).eliminatePartialMultifrontal(Ordering(C2_minus_B), function); } gttoc(Full_root_factoring); diff --git a/gtsam/inference/Conditional.h b/gtsam/inference/Conditional.h index 9f08e5623..a45e3df23 100644 --- a/gtsam/inference/Conditional.h +++ b/gtsam/inference/Conditional.h @@ -70,12 +70,33 @@ namespace gtsam { /// Typedef to this class typedef Conditional This; + public: + /** A mini implementation of an iterator range, to share const + * views of frontals and parents. */ + typedef std::pair ConstFactorRange; + struct ConstFactorRangeIterator { + ConstFactorRange range_; + // Delete default constructor + ConstFactorRangeIterator() = delete; + ConstFactorRangeIterator(ConstFactorRange const& x) : range_(x) {} + // Implement begin and end for iteration + typename FACTOR::const_iterator begin() const { return range_.first; } + typename FACTOR::const_iterator end() const { return range_.second; } + size_t size() const { return std::distance(range_.first, range_.second); } + const auto& front() const { return *begin(); } + // == operator overload for comparison with another iterator + template + bool operator==(const OTHER& rhs) const { + return std::equal(begin(), end(), rhs.begin()); + } + }; + /** View of the frontal keys (call frontals()) */ - typedef boost::iterator_range Frontals; + typedef ConstFactorRangeIterator Frontals; /** View of the separator keys (call parents()) */ - typedef boost::iterator_range Parents; + typedef ConstFactorRangeIterator Parents; protected: /// @name Standard Constructors @@ -121,10 +142,10 @@ namespace gtsam { } /** return a view of the frontal keys */ - Frontals frontals() const { return boost::make_iterator_range(beginFrontals(), endFrontals()); } + Frontals frontals() const { return ConstFactorRangeIterator({beginFrontals(), endFrontals()});} /** return a view of the parent keys */ - Parents parents() const { return boost::make_iterator_range(beginParents(), endParents()); } + Parents parents() const { return ConstFactorRangeIterator({beginParents(), endParents()}); } /** * All conditional types need to implement a `logProbability` function, for which diff --git a/gtsam/inference/EliminateableFactorGraph-inst.h b/gtsam/inference/EliminateableFactorGraph-inst.h index 558acbfe8..730d22176 100644 --- a/gtsam/inference/EliminateableFactorGraph-inst.h +++ b/gtsam/inference/EliminateableFactorGraph-inst.h @@ -20,7 +20,6 @@ #include #include -#include namespace gtsam { @@ -75,7 +74,7 @@ namespace gtsam { EliminationTreeType etree(asDerived(), (*variableIndex).get(), 
ordering); std::shared_ptr bayesNet; std::shared_ptr factorGraph; - boost::tie(bayesNet,factorGraph) = etree.eliminate(function); + std::tie(bayesNet,factorGraph) = etree.eliminate(function); // If any factors are remaining, the ordering was incomplete if(!factorGraph->empty()) throw InconsistentEliminationRequested(); @@ -139,7 +138,7 @@ namespace gtsam { JunctionTreeType junctionTree(etree); std::shared_ptr bayesTree; std::shared_ptr factorGraph; - boost::tie(bayesTree,factorGraph) = junctionTree.eliminate(function); + std::tie(bayesTree,factorGraph) = junctionTree.eliminate(function); // If any factors are remaining, the ordering was incomplete if(!factorGraph->empty()) throw InconsistentEliminationRequested(); @@ -277,7 +276,7 @@ namespace gtsam { // in the order requested. std::shared_ptr bayesTree; std::shared_ptr factorGraph; - boost::tie(bayesTree,factorGraph) = + std::tie(bayesTree,factorGraph) = eliminatePartialMultifrontal(marginalizedVariableOrdering, function, variableIndex); if(const Ordering* varsAsOrdering = boost::get(&variables)) @@ -344,7 +343,7 @@ namespace gtsam { // in the order requested. std::shared_ptr bayesTree; std::shared_ptr factorGraph; - boost::tie(bayesTree,factorGraph) = + std::tie(bayesTree,factorGraph) = eliminatePartialMultifrontal(marginalizedVariableOrdering, function, variableIndex); if(const Ordering* varsAsOrdering = boost::get(&variables)) diff --git a/gtsam/inference/JunctionTree-inst.h b/gtsam/inference/JunctionTree-inst.h index 86be13760..c607857c5 100644 --- a/gtsam/inference/JunctionTree-inst.h +++ b/gtsam/inference/JunctionTree-inst.h @@ -85,7 +85,7 @@ struct ConstructorTraversalData { keyAsOrdering.push_back(ETreeNode->key); SymbolicConditional::shared_ptr myConditional; SymbolicFactor::shared_ptr mySeparatorFactor; - boost::tie(myConditional, mySeparatorFactor) = internal::EliminateSymbolic( + std::tie(myConditional, mySeparatorFactor) = internal::EliminateSymbolic( symbolicFactors, keyAsOrdering); // Store symbolic elimination results in the parent diff --git a/gtsam/inference/VariableSlots.h b/gtsam/inference/VariableSlots.h index a665890c2..ebdf7ecea 100644 --- a/gtsam/inference/VariableSlots.h +++ b/gtsam/inference/VariableSlots.h @@ -24,7 +24,6 @@ #include #include -#include #include #include @@ -111,7 +110,7 @@ VariableSlots::VariableSlots(const FG& factorGraph) // the array entry for each factor that will indicate the factor // does not involve the variable. 
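Conditional.h above replaces the boost::iterator_range views of frontals and parents with a small hand-rolled view (ConstFactorRangeIterator) that only exposes begin/end/size/front, which is why HybridJunctionTree.cpp switches from frontals()[0] to frontals().front(). A self-contained sketch of that kind of range view, assuming a simple std::vector of keys; names here are illustrative, not GTSAM's:

#include <cassert>
#include <cstdint>
#include <iterator>
#include <vector>

using Key = std::uint64_t;
using KeyVector = std::vector<Key>;

// Minimal const view over [first, last), in the spirit of ConstFactorRangeIterator.
struct KeyRangeView {
  KeyVector::const_iterator first, last;
  KeyVector::const_iterator begin() const { return first; }
  KeyVector::const_iterator end() const { return last; }
  std::size_t size() const {
    return static_cast<std::size_t>(std::distance(first, last));
  }
  const Key& front() const { return *first; }
};

int main() {
  const KeyVector keys{10, 20, 30, 40};
  // View the first two keys ("frontals") without copying them.
  const KeyRangeView frontals{keys.begin(), keys.begin() + 2};
  assert(frontals.size() == 2);
  assert(frontals.front() == 10);  // .front() replaces operator[](0)
  Key sum = 0;
  for (const Key k : frontals) sum += k;
  assert(sum == 30);
  return 0;
}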
iterator thisVarSlots; bool inserted; - boost::tie(thisVarSlots, inserted) = this->insert(std::make_pair(involvedVariable, FastVector())); + std::tie(thisVarSlots, inserted) = this->insert(std::make_pair(involvedVariable, FastVector())); if(inserted) thisVarSlots->second.resize(factorGraph.nrFactors(), Empty); thisVarSlots->second[jointFactorPos] = factorVarSlot; diff --git a/gtsam/inference/graph-inl.h b/gtsam/inference/graph-inl.h index 93bb2a51c..ceae2e3ab 100644 --- a/gtsam/inference/graph-inl.h +++ b/gtsam/inference/graph-inl.h @@ -54,7 +54,7 @@ std::list predecessorMap2Keys(const PredecessorMap& p_map) { SGraph g; SVertex root; std::map key2vertex; - boost::tie(g, root, key2vertex) = gtsam::predecessorMap2Graph, SVertex, KEY>(p_map); + std::tie(g, root, key2vertex) = gtsam::predecessorMap2Graph, SVertex, KEY>(p_map); // breadth first visit on the graph std::list keys; @@ -114,7 +114,7 @@ SDGraph toBoostGraph(const G& graph) { /* ************************************************************************* */ template -boost::tuple > +std::tuple > predecessorMap2Graph(const PredecessorMap& p_map) { G g; @@ -146,7 +146,7 @@ predecessorMap2Graph(const PredecessorMap& p_map) { if (!foundRoot) throw std::invalid_argument("predecessorMap2Graph: invalid predecessor map!"); else - return boost::tuple >(g, root, key2vertex); + return std::tuple >(g, root, key2vertex); } /* ************************************************************************* */ @@ -185,7 +185,7 @@ std::shared_ptr composePoses(const G& graph, const PredecessorMap& PoseGraph g; PoseVertex root; std::map key2vertex; - boost::tie(g, root, key2vertex) = + std::tie(g, root, key2vertex) = predecessorMap2Graph(tree); // attach the relative poses to the edges @@ -207,8 +207,8 @@ std::shared_ptr composePoses(const G& graph, const PredecessorMap& PoseVertex v2 = key2vertex.find(key2)->second; POSE l1Xl2 = factor->measured(); - boost::tie(edge12, found1) = boost::edge(v1, v2, g); - boost::tie(edge21, found2) = boost::edge(v2, v1, g); + std::tie(edge12, found1) = boost::edge(v1, v2, g); + std::tie(edge21, found2) = boost::edge(v2, v1, g); if (found1 && found2) throw std::invalid_argument ("composePoses: invalid spanning tree"); if (!found1 && !found2) continue; if (found1) diff --git a/gtsam/inference/graph.h b/gtsam/inference/graph.h index 9f5f4c9bc..988c79b74 100644 --- a/gtsam/inference/graph.h +++ b/gtsam/inference/graph.h @@ -83,7 +83,7 @@ namespace gtsam { * V = Vertex type */ template - boost::tuple > predecessorMap2Graph(const PredecessorMap& p_map); + std::tuple > predecessorMap2Graph(const PredecessorMap& p_map); /** * Compose the poses by following the chain specified by the spanning tree diff --git a/gtsam/linear/Errors.cpp b/gtsam/linear/Errors.cpp index 92f7bb4b8..d36b647ce 100644 --- a/gtsam/linear/Errors.cpp +++ b/gtsam/linear/Errors.cpp @@ -17,7 +17,6 @@ * @author Christian Potthast */ -#include #include #include @@ -28,7 +27,7 @@ namespace gtsam { /* ************************************************************************* */ Errors createErrors(const VectorValues& V) { Errors result; - for (const Vector& e : V | boost::adaptors::map_values) { + for (const auto& [key, e] : V) { result.push_back(e); } return result; diff --git a/gtsam/linear/GaussianBayesNet.cpp b/gtsam/linear/GaussianBayesNet.cpp index 2e62d2d42..b04878ac5 100644 --- a/gtsam/linear/GaussianBayesNet.cpp +++ b/gtsam/linear/GaussianBayesNet.cpp @@ -20,8 +20,8 @@ #include #include -#include #include +#include using namespace std; using namespace gtsam; @@ 
-50,11 +50,11 @@ namespace gtsam { VectorValues solution = given; // (R*x)./sigmas = y by solving x=inv(R)*(y.*sigmas) // solve each node in reverse topological sort order (parents first) - for (auto cg : boost::adaptors::reverse(*this)) { + for (auto it = std::make_reverse_iterator(end()); it != std::make_reverse_iterator(begin()); ++it) { // i^th part of R*x=y, x=inv(R)*y // (Rii*xi + R_i*x(i+1:))./si = yi => // xi = inv(Rii)*(yi.*si - R_i*x(i+1:)) - solution.insert(cg->solve(solution)); + solution.insert((*it)->solve(solution)); } return solution; } @@ -69,8 +69,8 @@ namespace gtsam { std::mt19937_64* rng) const { VectorValues result(given); // sample each node in reverse topological sort order (parents first) - for (auto cg : boost::adaptors::reverse(*this)) { - const VectorValues sampled = cg->sample(result, rng); + for (auto it = std::make_reverse_iterator(end()); it != std::make_reverse_iterator(begin()); ++it) { + const VectorValues sampled = (*it)->sample(result, rng); result.insert(sampled); } return result; @@ -131,8 +131,8 @@ namespace gtsam { VectorValues result; // TODO this looks pretty sketchy. result is passed as the parents argument // as it's filled up by solving the gaussian conditionals. - for (auto cg: boost::adaptors::reverse(*this)) { - result.insert(cg->solveOtherRHS(result, rhs)); + for (auto it = std::make_reverse_iterator(end()); it != std::make_reverse_iterator(begin()); ++it) { + result.insert((*it)->solveOtherRHS(result, rhs)); } return result; } diff --git a/gtsam/linear/HessianFactor.cpp b/gtsam/linear/HessianFactor.cpp index 194a7ca53..0302d8c6e 100644 --- a/gtsam/linear/HessianFactor.cpp +++ b/gtsam/linear/HessianFactor.cpp @@ -30,19 +30,12 @@ #include #include -#include -#include -#include -#include #include #include +#include "gtsam/base/Vector.h" using namespace std; -namespace br { -using namespace boost::range; -using namespace boost::adaptors; -} namespace gtsam { @@ -144,12 +137,20 @@ namespace { DenseIndex _getSizeHF(const Vector& m) { return m.size(); } + +std::vector _getSizeHFVec(const std::vector& m) { + std::vector dims; + for (const Vector& v : m) { + dims.push_back(v.size()); + } + return dims; +} } /* ************************************************************************* */ HessianFactor::HessianFactor(const KeyVector& js, const std::vector& Gs, const std::vector& gs, double f) : - GaussianFactor(js), info_(gs | br::transformed(&_getSizeHF), true) { + GaussianFactor(js), info_(_getSizeHFVec(gs), true) { // Get the number of variables size_t variable_count = js.size(); @@ -417,7 +418,7 @@ void HessianFactor::multiplyHessianAdd(double alpha, const VectorValues& x, for (DenseIndex i = 0; i < (DenseIndex) size(); ++i) { bool didNotExist; VectorValues::iterator it; - boost::tie(it, didNotExist) = yvalues.tryInsert(keys_[i], Vector()); + std::tie(it, didNotExist) = yvalues.tryInsert(keys_[i], Vector()); if (didNotExist) it->second = alpha * y[i]; // init else diff --git a/gtsam/linear/IterativeSolver.h b/gtsam/linear/IterativeSolver.h index 654e2ea12..c4a719436 100644 --- a/gtsam/linear/IterativeSolver.h +++ b/gtsam/linear/IterativeSolver.h @@ -21,7 +21,6 @@ #include #include -#include #include #include diff --git a/gtsam/linear/JacobianFactor.cpp b/gtsam/linear/JacobianFactor.cpp index 6f8bd99f9..9f6b8da16 100644 --- a/gtsam/linear/JacobianFactor.cpp +++ b/gtsam/linear/JacobianFactor.cpp @@ -32,10 +32,6 @@ #include #include -#include -#include -#include -#include #include #include @@ -102,7 +98,7 @@ JacobianFactor::JacobianFactor(const 
HessianFactor& factor) // Do Cholesky to get a Jacobian size_t maxrank; bool success; - boost::tie(maxrank, success) = choleskyCareful(Ab_.matrix()); + std::tie(maxrank, success) = choleskyCareful(Ab_.matrix()); // Check that Cholesky succeeded OR it managed to factor the full Hessian. // THe latter case occurs with non-positive definite matrices arising from QP. @@ -122,7 +118,7 @@ JacobianFactor::JacobianFactor(const HessianFactor& factor) /* ************************************************************************* */ // Helper functions for combine constructor namespace { -boost::tuple, DenseIndex, DenseIndex> _countDims( +std::tuple, DenseIndex, DenseIndex> _countDims( const FastVector& factors, const FastVector& variableSlots) { gttic(countDims); @@ -188,7 +184,7 @@ boost::tuple, DenseIndex, DenseIndex> _countDims( } #endif - return boost::make_tuple(varDims, m, n); + return std::make_tuple(varDims, m, n); } /* ************************************************************************* */ @@ -221,16 +217,16 @@ void JacobianFactor::JacobianFactorHelper(const GaussianFactorGraph& graph, // Count dimensions FastVector varDims; DenseIndex m, n; - boost::tie(varDims, m, n) = _countDims(jacobians, orderedSlots); + std::tie(varDims, m, n) = _countDims(jacobians, orderedSlots); // Allocate matrix and copy keys in order gttic(allocate); Ab_ = VerticalBlockMatrix(varDims, m, true); // Allocate augmented matrix Base::keys_.resize(orderedSlots.size()); - boost::range::copy( - // Get variable keys - orderedSlots | boost::adaptors::indirected | boost::adaptors::map_keys, - Base::keys_.begin()); + // Copy keys in order + std::transform(orderedSlots.begin(), orderedSlots.end(), + Base::keys_.begin(), + [](const VariableSlots::const_iterator& it) {return it->first;}); gttoc(allocate); // Loop over slots in combined factor and copy blocks from source factors diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index 32dd1c535..02e44e2a8 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -480,7 +480,7 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const { const size_t maxRank = min(m, n); // create storage for [R d] - typedef boost::tuple Triple; + typedef std::tuple Triple; list Rd; Matrix rd(1, n + 1); // and for row of R @@ -506,7 +506,7 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const { rd = Ab.row(*constraint_row); // Construct solution (r, d, sigma) - Rd.push_back(boost::make_tuple(j, rd, kInfinity)); + Rd.push_back(std::make_tuple(j, rd, kInfinity)); // exit after rank exhausted if (Rd.size() >= maxRank) @@ -552,7 +552,7 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const { rd.block(0, j + 1, 1, n - j) = pseudo.transpose() * Ab.block(0, j + 1, m, n - j); // construct solution (r, d, sigma) - Rd.push_back(boost::make_tuple(j, rd, precision)); + Rd.push_back(std::make_tuple(j, rd, precision)); } else { // If precision is zero, no information on this column // This is actually not limited to constraints, could happen in Gaussian::QR @@ -577,9 +577,9 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const { bool mixed = false; Ab.setZero(); // make sure we don't look below for (const Triple& t: Rd) { - const size_t& j = t.get<0>(); - const Matrix& rd = t.get<1>(); - precisions(i) = t.get<2>(); + const size_t& j = std::get<0>(t); + const Matrix& rd = std::get<1>(t); + precisions(i) = std::get<2>(t); if (std::isinf(precisions(i))) mixed = true; Ab.block(i, j, 1, n + 1 - j) = rd.block(0, j, 1, n + 1 - j); diff --git a/gtsam/linear/Preconditioner.cpp 
b/gtsam/linear/Preconditioner.cpp index cb7e36503..84837760c 100644 --- a/gtsam/linear/Preconditioner.cpp +++ b/gtsam/linear/Preconditioner.cpp @@ -14,7 +14,6 @@ #include #include #include -#include #include #include @@ -145,8 +144,9 @@ void BlockJacobiPreconditioner::build( /* getting the block diagonals over the factors */ std::map hessianMap =gfg.hessianBlockDiagonal(); - for (const Matrix& hessian: hessianMap | boost::adaptors::map_values) + for (const auto& [key, hessian]: hessianMap) { blocks.push_back(hessian); + } /* if necessary, allocating the memory for cacheing the factorization results */ if ( nnz > bufferSize_ ) { diff --git a/gtsam/linear/SubgraphPreconditioner.cpp b/gtsam/linear/SubgraphPreconditioner.cpp index b0a84bc2e..ece8d13d4 100644 --- a/gtsam/linear/SubgraphPreconditioner.cpp +++ b/gtsam/linear/SubgraphPreconditioner.cpp @@ -25,7 +25,6 @@ #include #include -#include #include @@ -205,7 +204,8 @@ void SubgraphPreconditioner::solve(const Vector &y, Vector &x) const { assert(x.size() == y.size()); /* back substitute */ - for (const auto &cg : boost::adaptors::reverse(Rc1_)) { + for (auto it = std::make_reverse_iterator(Rc1_.end()); it != std::make_reverse_iterator(Rc1_.begin()); ++it) { + auto& cg = *it; /* collect a subvector of x that consists of the parents of cg (S) */ const KeyVector parentKeys(cg->beginParents(), cg->endParents()); const KeyVector frontalKeys(cg->beginFrontals(), cg->endFrontals()); diff --git a/gtsam/linear/VectorValues.cpp b/gtsam/linear/VectorValues.cpp index 04792b6ba..cfa0ed9a2 100644 --- a/gtsam/linear/VectorValues.cpp +++ b/gtsam/linear/VectorValues.cpp @@ -19,20 +19,12 @@ #include #include -#include #include -#include -#include using namespace std; namespace gtsam { - using boost::combine; - using boost::adaptors::transformed; - using boost::adaptors::map_values; - using boost::accumulate; - /* ************************************************************************ */ VectorValues::VectorValues(const VectorValues& first, const VectorValues& second) { @@ -46,12 +38,8 @@ namespace gtsam { /* ************************************************************************ */ VectorValues::VectorValues(const Vector& x, const Dims& dims) { - using Pair = pair; size_t j = 0; - for (const Pair& v : dims) { - Key key; - size_t n; - boost::tie(key, n) = v; + for (const auto& [key,n] : dims) { #ifdef TBB_GREATER_EQUAL_2020 values_.emplace(key, x.segment(j, n)); #else @@ -78,11 +66,11 @@ namespace gtsam { VectorValues VectorValues::Zero(const VectorValues& other) { VectorValues result; - for(const KeyValuePair& v: other) + for(const auto& [key,value]: other) #ifdef TBB_GREATER_EQUAL_2020 - result.values_.emplace(v.first, Vector::Zero(v.second.size())); + result.values_.emplace(key, Vector::Zero(value.size())); #else - result.values_.insert(std::make_pair(v.first, Vector::Zero(v.second.size()))); + result.values_.insert(std::make_pair(key, Vector::Zero(value.size()))); #endif return result; } @@ -100,18 +88,18 @@ namespace gtsam { /* ************************************************************************ */ VectorValues& VectorValues::update(const VectorValues& values) { iterator hint = begin(); - for (const KeyValuePair& key_value : values) { + for (const auto& [key,value] : values) { // Use this trick to find the value using a hint, since we are inserting // from another sorted map size_t oldSize = values_.size(); - hint = values_.insert(hint, key_value); + hint = values_.emplace_hint(hint, key, value); if (values_.size() > oldSize) { 
values_.unsafe_erase(hint); throw out_of_range( "Requested to update a VectorValues with another VectorValues that " "contains keys not present in the first."); } else { - hint->second = key_value.second; + hint->second = value; } } return *this; @@ -131,8 +119,9 @@ namespace gtsam { /* ************************************************************************ */ void VectorValues::setZero() { - for(Vector& v: values_ | map_values) - v.setZero(); + for(auto& [key, value] : *this) { + value.setZero(); + } } /* ************************************************************************ */ @@ -140,16 +129,15 @@ namespace gtsam { // Change print depending on whether we are using TBB #ifdef GTSAM_USE_TBB map sorted; - for (const auto& key_value : v) { - sorted.emplace(key_value.first, key_value.second); + for (const auto& [key,value] : v) { + sorted.emplace(key, value); } - for (const auto& key_value : sorted) + for (const auto& [key,value] : sorted) #else - for (const auto& key_value : v) + for (const auto& [key,value] : v) #endif { - os << " " << StreamedKey(key_value.first) << ": " << key_value.second.transpose() - << "\n"; + os << " " << StreamedKey(key) << ": " << value.transpose() << "\n"; } return os; } @@ -166,9 +154,11 @@ namespace gtsam { bool VectorValues::equals(const VectorValues& x, double tol) const { if(this->size() != x.size()) return false; - for(const auto values: boost::combine(*this, x)) { - if(values.get<0>().first != values.get<1>().first || - !equal_with_abs_tol(values.get<0>().second, values.get<1>().second, tol)) + auto this_it = this->begin(); + auto x_it = x.begin(); + for(; this_it != this->end(); ++this_it, ++x_it) { + if(this_it->first != x_it->first || + !equal_with_abs_tol(this_it->second, x_it->second, tol)) return false; } return true; @@ -178,14 +168,15 @@ namespace gtsam { Vector VectorValues::vector() const { // Count dimensions DenseIndex totalDim = 0; - for (const Vector& v : *this | map_values) totalDim += v.size(); + for (const auto& [key, value] : *this) + totalDim += value.size(); // Copy vectors Vector result(totalDim); DenseIndex pos = 0; - for (const Vector& v : *this | map_values) { - result.segment(pos, v.size()) = v; - pos += v.size(); + for (const auto& [key, value] : *this) { + result.segment(pos, value.size()) = value; + pos += value.size(); } return result; @@ -196,7 +187,7 @@ namespace gtsam { { // Count dimensions DenseIndex totalDim = 0; - for(size_t dim: keys | map_values) + for (const auto& [key, dim] : keys) totalDim += dim; Vector result(totalDim); size_t j = 0; @@ -215,19 +206,19 @@ namespace gtsam { /* ************************************************************************ */ namespace internal { - bool structureCompareOp(const boost::tuple& vv) + bool structureCompareOp(const VectorValues::value_type& a, const VectorValues::value_type& b) { - return vv.get<0>().first == vv.get<1>().first - && vv.get<0>().second.size() == vv.get<1>().second.size(); + return a.first == b.first && a.second.size() == b.second.size(); } } /* ************************************************************************ */ bool VectorValues::hasSameStructure(const VectorValues other) const { - return accumulate(combine(*this, other) - | transformed(internal::structureCompareOp), true, logical_and()); + // compare the "other" container with this one, using the structureCompareOp + // and then return true if all elements are compared as equal + return std::equal(this->begin(), this->end(), other.begin(), other.end(), + internal::structureCompareOp); } /* 
************************************************************************ */ @@ -236,14 +227,14 @@ namespace gtsam { if(this->size() != v.size()) throw invalid_argument("VectorValues::dot called with a VectorValues of different structure"); double result = 0.0; - typedef boost::tuple ValuePair; - using boost::adaptors::map_values; - for(const ValuePair values: boost::combine(*this, v)) { - assert_throw(values.get<0>().first == values.get<1>().first, - invalid_argument("VectorValues::dot called with a VectorValues of different structure")); - assert_throw(values.get<0>().second.size() == values.get<1>().second.size(), - invalid_argument("VectorValues::dot called with a VectorValues of different structure")); - result += values.get<0>().second.dot(values.get<1>().second); + auto this_it = this->begin(); + auto v_it = v.begin(); + for(; this_it != this->end(); ++this_it, ++v_it) { + assert_throw(this_it->first == v_it->first, + invalid_argument("VectorValues::dot called with a VectorValues of different structure")); + assert_throw(this_it->second.size() == v_it->second.size(), + invalid_argument("VectorValues::dot called with a VectorValues of different structure")); + result += this_it->second.dot(v_it->second); } return result; } @@ -256,9 +247,9 @@ namespace gtsam { /* ************************************************************************ */ double VectorValues::squaredNorm() const { double sumSquares = 0.0; - using boost::adaptors::map_values; - for(const Vector& v: *this | map_values) - sumSquares += v.squaredNorm(); + for(const auto& [key, value]: *this) { + sumSquares += value.squaredNorm(); + } return sumSquares; } @@ -372,8 +363,9 @@ namespace gtsam { /* ************************************************************************ */ VectorValues& VectorValues::operator*=(double alpha) { - for(Vector& v: *this | map_values) - v *= alpha; + for (auto& [key, value]: *this) { + value *= alpha; + } return *this; } diff --git a/gtsam/linear/tests/testGaussianBayesNet.cpp b/gtsam/linear/tests/testGaussianBayesNet.cpp index ec208fa7b..b849f2d82 100644 --- a/gtsam/linear/tests/testGaussianBayesNet.cpp +++ b/gtsam/linear/tests/testGaussianBayesNet.cpp @@ -24,7 +24,6 @@ #include #include -#include #include // STL/C++ @@ -52,7 +51,7 @@ static GaussianBayesNet noisyBayesNet = { TEST( GaussianBayesNet, Matrix ) { Matrix R; Vector d; - boost::tie(R,d) = smallBayesNet.matrix(); // find matrix and RHS + std::tie(R,d) = smallBayesNet.matrix(); // find matrix and RHS Matrix R1 = (Matrix2() << 1.0, 1.0, @@ -102,7 +101,7 @@ TEST(GaussianBayesNet, Evaluate2) { TEST( GaussianBayesNet, NoisyMatrix ) { Matrix R; Vector d; - boost::tie(R,d) = noisyBayesNet.matrix(); // find matrix and RHS + std::tie(R,d) = noisyBayesNet.matrix(); // find matrix and RHS Matrix R1 = (Matrix2() << 0.5, 0.5, @@ -126,7 +125,7 @@ TEST(GaussianBayesNet, Optimize) { TEST(GaussianBayesNet, NoisyOptimize) { Matrix R; Vector d; - boost::tie(R, d) = noisyBayesNet.matrix(); // find matrix and RHS + std::tie(R, d) = noisyBayesNet.matrix(); // find matrix and RHS const Vector x = R.inverse() * d; const VectorValues expected{{_x_, x.head(1)}, {_y_, x.tail(1)}}; @@ -239,7 +238,7 @@ TEST( GaussianBayesNet, MatrixStress ) const Ordering ordering(keys); Matrix R; Vector d; - boost::tie(R, d) = bn.matrix(ordering); + std::tie(R, d) = bn.matrix(ordering); EXPECT(assert_equal(expected.vector(ordering), R.inverse() * d)); } } diff --git a/gtsam/linear/tests/testGaussianFactorGraph.cpp b/gtsam/linear/tests/testGaussianFactorGraph.cpp index 
2ed4e6589..d078ddf20 100644 --- a/gtsam/linear/tests/testGaussianFactorGraph.cpp +++ b/gtsam/linear/tests/testGaussianFactorGraph.cpp @@ -157,7 +157,7 @@ TEST(GaussianFactorGraph, matrices) { Vector b = Ab.col(Ab.cols() - 1); Matrix actualA; Vector actualb; - boost::tie(actualA, actualb) = gfg.jacobian(); + std::tie(actualA, actualb) = gfg.jacobian(); EXPECT(assert_equal(A, actualA)); EXPECT(assert_equal(b, actualb)); @@ -166,7 +166,7 @@ TEST(GaussianFactorGraph, matrices) { Vector eta = A.transpose() * b; Matrix actualL; Vector actualeta; - boost::tie(actualL, actualeta) = gfg.hessian(); + std::tie(actualL, actualeta) = gfg.hessian(); EXPECT(assert_equal(L, actualL)); EXPECT(assert_equal(eta, actualeta)); @@ -247,7 +247,7 @@ TEST(GaussianFactorGraph, eliminate_empty) { gfg.add(JacobianFactor()); GaussianBayesNet::shared_ptr actualBN; GaussianFactorGraph::shared_ptr remainingGFG; - boost::tie(actualBN, remainingGFG) = gfg.eliminatePartialSequential(Ordering()); + std::tie(actualBN, remainingGFG) = gfg.eliminatePartialSequential(Ordering()); // expected Bayes net is empty GaussianBayesNet expectedBN; @@ -265,10 +265,10 @@ TEST(GaussianFactorGraph, matrices2) { GaussianFactorGraph gfg = createSimpleGaussianFactorGraph(); Matrix A; Vector b; - boost::tie(A, b) = gfg.jacobian(); + std::tie(A, b) = gfg.jacobian(); Matrix AtA; Vector eta; - boost::tie(AtA, eta) = gfg.hessian(); + std::tie(AtA, eta) = gfg.hessian(); EXPECT(assert_equal(A.transpose() * A, AtA)); EXPECT(assert_equal(A.transpose() * b, eta)); Matrix expectedAtA(6, 6); @@ -316,7 +316,7 @@ TEST(GaussianFactorGraph, multiplyHessianAdd2) { // brute force Matrix AtA; Vector eta; - boost::tie(AtA, eta) = gfg.hessian(); + std::tie(AtA, eta) = gfg.hessian(); Vector X(6); X << 1, 2, 3, 4, 5, 6; Vector Y(6); @@ -343,10 +343,10 @@ TEST(GaussianFactorGraph, matricesMixed) { GaussianFactorGraph gfg = createGaussianFactorGraphWithHessianFactor(); Matrix A; Vector b; - boost::tie(A, b) = gfg.jacobian(); // incorrect ! + std::tie(A, b) = gfg.jacobian(); // incorrect ! 
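Several hunks above (Errors.cpp, Preconditioner.cpp, and the VectorValues.cpp loops) drop `| boost::adaptors::map_values` and `map_keys` in favour of range-for loops over the map with C++17 structured bindings. A short sketch of the idiom on a plain std::map; the container and values are illustrative only:

#include <iostream>
#include <map>
#include <string>

int main() {
  std::map<std::string, double> hessianDiagonal{{"x", 1.0}, {"y", 4.0}};

  // Read-only pass over keys and values (was: container | boost::adaptors::map_values).
  double sum = 0.0;
  for (const auto& [key, value] : hessianDiagonal) {
    std::cout << key << ": " << value << "\n";
    sum += value;
  }

  // Mutating the mapped values in place (was: for (double& v : container | map_values)).
  for (auto& [key, value] : hessianDiagonal) {
    value *= 2.0;
  }

  std::cout << "sum=" << sum << " x=" << hessianDiagonal["x"] << "\n";  // sum=5 x=2
  return 0;
}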
Matrix AtA; Vector eta; - boost::tie(AtA, eta) = gfg.hessian(); // correct + std::tie(AtA, eta) = gfg.hessian(); // correct EXPECT(assert_equal(A.transpose() * A, AtA)); Vector expected = -(Vector(6) << -25, 17.5, 5, -13.5, 29, 4).finished(); EXPECT(assert_equal(expected, eta)); diff --git a/gtsam/linear/tests/testHessianFactor.cpp b/gtsam/linear/tests/testHessianFactor.cpp index 5fd6e833f..8821dfdd3 100644 --- a/gtsam/linear/tests/testHessianFactor.cpp +++ b/gtsam/linear/tests/testHessianFactor.cpp @@ -295,13 +295,13 @@ TEST(HessianFactor, CombineAndEliminate1) { Ordering ordering {1}; GaussianConditional::shared_ptr expectedConditional; JacobianFactor::shared_ptr expectedFactor; - boost::tie(expectedConditional, expectedFactor) = jacobian.eliminate(ordering); + std::tie(expectedConditional, expectedFactor) = jacobian.eliminate(ordering); CHECK(expectedFactor); // Eliminate GaussianConditional::shared_ptr actualConditional; HessianFactor::shared_ptr actualHessian; - boost::tie(actualConditional, actualHessian) = // + std::tie(actualConditional, actualHessian) = // EliminateCholesky(gfg, ordering); actualConditional->setModel(false,Vector3(1,1,1)); // add a unit model for comparison @@ -358,13 +358,13 @@ TEST(HessianFactor, CombineAndEliminate2) { Ordering ordering {0}; GaussianConditional::shared_ptr expectedConditional; JacobianFactor::shared_ptr expectedFactor; - boost::tie(expectedConditional, expectedFactor) = // + std::tie(expectedConditional, expectedFactor) = // jacobian.eliminate(ordering); // Eliminate GaussianConditional::shared_ptr actualConditional; HessianFactor::shared_ptr actualHessian; - boost::tie(actualConditional, actualHessian) = // + std::tie(actualConditional, actualHessian) = // EliminateCholesky(gfg, ordering); actualConditional->setModel(false,Vector3(1,1,1)); // add a unit model for comparison @@ -498,7 +498,7 @@ TEST(HessianFactor, gradientAtZero) // test gradient at zero VectorValues expectedG{{0, -g1}, {1, -g2}}; - Matrix A; Vector b; boost::tie(A,b) = factor.jacobian(); + Matrix A; Vector b; std::tie(A,b) = factor.jacobian(); KeyVector keys {0, 1}; EXPECT(assert_equal(-A.transpose()*b, expectedG.vector(keys))); VectorValues actualG = factor.gradientAtZero(); diff --git a/gtsam/linear/tests/testJacobianFactor.cpp b/gtsam/linear/tests/testJacobianFactor.cpp index ad722eb94..9597b8f53 100644 --- a/gtsam/linear/tests/testJacobianFactor.cpp +++ b/gtsam/linear/tests/testJacobianFactor.cpp @@ -26,7 +26,6 @@ #include #include -#include using namespace std; using namespace gtsam; @@ -36,8 +35,8 @@ using Dims = std::vector; // For constructing block matrices namespace { namespace simple { // Terms we'll use - const vector > terms{ - {5, I_3x3}, {10, 2 * I_3x3}, {15, 3 * I_3x3}}; + using Terms = vector >; + const Terms terms{{5, I_3x3}, {10, 2 * I_3x3}, {15, 3 * I_3x3}}; // RHS and sigmas const Vector b = Vector3(1., 2., 3.); @@ -54,8 +53,7 @@ TEST(JacobianFactor, constructors_and_accessors) // Test for using different numbers of terms { // b vector only constructor - JacobianFactor expected( - boost::make_iterator_range(terms.begin(), terms.begin()), b); + JacobianFactor expected(Terms{}, b); JacobianFactor actual(b); EXPECT(assert_equal(expected, actual)); EXPECT(assert_equal(b, expected.getb())); @@ -65,8 +63,7 @@ TEST(JacobianFactor, constructors_and_accessors) } { // One term constructor - JacobianFactor expected( - boost::make_iterator_range(terms.begin(), terms.begin() + 1), b, noise); + JacobianFactor expected(Terms{terms[0]}, b, noise); JacobianFactor 
actual(terms[0].first, terms[0].second, b, noise); EXPECT(assert_equal(expected, actual)); LONGS_EQUAL((long)terms[0].first, (long)actual.keys().back()); @@ -78,8 +75,7 @@ TEST(JacobianFactor, constructors_and_accessors) } { // Two term constructor - JacobianFactor expected( - boost::make_iterator_range(terms.begin(), terms.begin() + 2), b, noise); + JacobianFactor expected(Terms{terms[0], terms[1]}, b, noise); JacobianFactor actual(terms[0].first, terms[0].second, terms[1].first, terms[1].second, b, noise); EXPECT(assert_equal(expected, actual)); @@ -92,8 +88,7 @@ TEST(JacobianFactor, constructors_and_accessors) } { // Three term constructor - JacobianFactor expected( - boost::make_iterator_range(terms.begin(), terms.begin() + 3), b, noise); + JacobianFactor expected(Terms{terms[0], terms[1], terms[2]}, b, noise); JacobianFactor actual(terms[0].first, terms[0].second, terms[1].first, terms[1].second, terms[2].first, terms[2].second, b, noise); EXPECT(assert_equal(expected, actual)); @@ -106,8 +101,7 @@ TEST(JacobianFactor, constructors_and_accessors) } { // Test three-term constructor with std::map - JacobianFactor expected( - boost::make_iterator_range(terms.begin(), terms.begin() + 3), b, noise); + JacobianFactor expected(Terms{terms[0], terms[1], terms[2]}, b, noise); map mapTerms; // note order of insertion plays no role: order will be determined by keys mapTerms.insert(terms[2]); @@ -124,14 +118,17 @@ TEST(JacobianFactor, constructors_and_accessors) } { // VerticalBlockMatrix constructor - JacobianFactor expected( - boost::make_iterator_range(terms.begin(), terms.begin() + 3), b, noise); + JacobianFactor expected(Terms{terms[0], terms[1], terms[2]}, b, noise); VerticalBlockMatrix blockMatrix(Dims{3, 3, 3, 1}, 3); blockMatrix(0) = terms[0].second; blockMatrix(1) = terms[1].second; blockMatrix(2) = terms[2].second; blockMatrix(3) = b; - JacobianFactor actual(terms | boost::adaptors::map_keys, blockMatrix, noise); + // get a vector of keys from the terms + vector keys; + for (const auto& term : terms) + keys.push_back(term.first); + JacobianFactor actual(keys, blockMatrix, noise); EXPECT(assert_equal(expected, actual)); LONGS_EQUAL((long)terms[2].first, (long)actual.keys().back()); EXPECT(assert_equal(terms[2].second, actual.getA(actual.end() - 1))); @@ -371,7 +368,7 @@ TEST(JacobianFactor, operators ) EXPECT(assert_equal(expectedX, actualX)); // test gradient at zero - Matrix A; Vector b2; boost::tie(A,b2) = lf.jacobian(); + Matrix A; Vector b2; std::tie(A,b2) = lf.jacobian(); VectorValues expectedG; expectedG.insert(1, Vector2(20,-10)); expectedG.insert(2, Vector2(-20, 10)); diff --git a/gtsam/linear/tests/testRegularJacobianFactor.cpp b/gtsam/linear/tests/testRegularJacobianFactor.cpp index 205d9d092..a21bbf982 100644 --- a/gtsam/linear/tests/testRegularJacobianFactor.cpp +++ b/gtsam/linear/tests/testRegularJacobianFactor.cpp @@ -24,9 +24,6 @@ #include -#include -#include - using namespace std; using namespace gtsam; diff --git a/gtsam/linear/tests/testVectorValues.cpp b/gtsam/linear/tests/testVectorValues.cpp index 49fc56d19..a4e101658 100644 --- a/gtsam/linear/tests/testVectorValues.cpp +++ b/gtsam/linear/tests/testVectorValues.cpp @@ -21,12 +21,9 @@ #include -#include - #include using namespace std; -using boost::adaptors::map_keys; using namespace gtsam; /* ************************************************************************* */ diff --git a/gtsam/navigation/ManifoldPreintegration.cpp b/gtsam/navigation/ManifoldPreintegration.cpp index 526bf6c73..c0c917d9c 100644 --- 
a/gtsam/navigation/ManifoldPreintegration.cpp +++ b/gtsam/navigation/ManifoldPreintegration.cpp @@ -68,7 +68,7 @@ void ManifoldPreintegration::update(const Vector3& measuredAcc, // Possibly correct for sensor pose Matrix3 D_correctedAcc_acc, D_correctedAcc_omega, D_correctedOmega_omega; if (p().body_P_sensor) - boost::tie(acc, omega) = correctMeasurementsBySensorPose(acc, omega, + std::tie(acc, omega) = correctMeasurementsBySensorPose(acc, omega, D_correctedAcc_acc, D_correctedAcc_omega, D_correctedOmega_omega); // Save current rotation for updating Jacobians diff --git a/gtsam/navigation/TangentPreintegration.cpp b/gtsam/navigation/TangentPreintegration.cpp index e8579daba..a472b2cfd 100644 --- a/gtsam/navigation/TangentPreintegration.cpp +++ b/gtsam/navigation/TangentPreintegration.cpp @@ -112,7 +112,7 @@ void TangentPreintegration::update(const Vector3& measuredAcc, // Possibly correct for sensor pose by converting to body frame Matrix3 D_correctedAcc_acc, D_correctedAcc_omega, D_correctedOmega_omega; if (p().body_P_sensor) - boost::tie(acc, omega) = correctMeasurementsBySensorPose(acc, omega, + std::tie(acc, omega) = correctMeasurementsBySensorPose(acc, omega, D_correctedAcc_acc, D_correctedAcc_omega, D_correctedOmega_omega); // Do update diff --git a/gtsam/navigation/tests/testGPSFactor.cpp b/gtsam/navigation/tests/testGPSFactor.cpp index 741ec4dfd..c3c6bb2d2 100644 --- a/gtsam/navigation/tests/testGPSFactor.cpp +++ b/gtsam/navigation/tests/testGPSFactor.cpp @@ -125,7 +125,7 @@ TEST(GPSData, init) { // Estimate initial state Pose3 T; Vector3 nV; - boost::tie(T, nV) = GPSFactor::EstimateState(t1, NED1, t2, NED2, 84831.0796); + std::tie(T, nV) = GPSFactor::EstimateState(t1, NED1, t2, NED2, 84831.0796); // Check values values EXPECT(assert_equal((Vector )Vector3(29.9575, -29.0564, -1.95993), nV, 1e-4)); diff --git a/gtsam/nonlinear/Expression-inl.h b/gtsam/nonlinear/Expression-inl.h index 9fece59bb..e933be5e1 100644 --- a/gtsam/nonlinear/Expression-inl.h +++ b/gtsam/nonlinear/Expression-inl.h @@ -28,11 +28,6 @@ #include -#include -#include -#include -#include - namespace gtsam { template @@ -150,7 +145,7 @@ T Expression::value(const Values& values, // Call private version that returns derivatives in H KeyVector keys; FastVector dims; - boost::tie(keys, dims) = keysAndDims(); + std::tie(keys, dims) = keysAndDims(); return valueAndDerivatives(values, keys, dims, *H); } else // no derivatives needed, just return value @@ -235,8 +230,13 @@ typename Expression::KeysAndDims Expression::keysAndDims() const { dims(map); size_t n = map.size(); KeysAndDims pair = std::make_pair(KeyVector(n), FastVector(n)); - boost::copy(map | boost::adaptors::map_keys, pair.first.begin()); - boost::copy(map | boost::adaptors::map_values, pair.second.begin()); + // Copy map into pair of vectors + auto key_it = pair.first.begin(); + auto dim_it = pair.second.begin(); + for (const auto& [key, value] : map) { + *key_it++ = key; + *dim_it++ = value; + } return pair; } diff --git a/gtsam/nonlinear/ExpressionFactor.h b/gtsam/nonlinear/ExpressionFactor.h index 8c4e8ce86..ada4609b9 100644 --- a/gtsam/nonlinear/ExpressionFactor.h +++ b/gtsam/nonlinear/ExpressionFactor.h @@ -180,7 +180,7 @@ protected: if (keys_.empty()) { // This is the case when called in ExpressionFactor Constructor. // We then take the keys from the expression in sorted order. 
- boost::tie(keys_, dims_) = expression_.keysAndDims(); + std::tie(keys_, dims_) = expression_.keysAndDims(); } else { // This happens with classes derived from BinaryExpressionFactor etc. // In that case, the keys_ are already defined and we just need to grab diff --git a/gtsam/nonlinear/ISAM2-impl.cpp b/gtsam/nonlinear/ISAM2-impl.cpp index a809cbc08..20874e2dc 100644 --- a/gtsam/nonlinear/ISAM2-impl.cpp +++ b/gtsam/nonlinear/ISAM2-impl.cpp @@ -22,7 +22,6 @@ #include // for selective linearization thresholds #include -#include #include #include #include diff --git a/gtsam/nonlinear/ISAM2-impl.h b/gtsam/nonlinear/ISAM2-impl.h index 8c0af2c2d..ad53b7972 100644 --- a/gtsam/nonlinear/ISAM2-impl.h +++ b/gtsam/nonlinear/ISAM2-impl.h @@ -28,13 +28,6 @@ #include #include -#include -#include -namespace br { -using namespace boost::range; -using namespace boost::adaptors; -} // namespace br - #include #include #include diff --git a/gtsam/nonlinear/ISAM2.cpp b/gtsam/nonlinear/ISAM2.cpp index dafe938c6..7f2dfa6e8 100644 --- a/gtsam/nonlinear/ISAM2.cpp +++ b/gtsam/nonlinear/ISAM2.cpp @@ -176,9 +176,11 @@ void ISAM2::recalculateBatch(const ISAM2UpdateParams& updateParams, gttic(recalculateBatch); gttic(add_keys); - br::copy(variableIndex_ | br::map_keys, - std::inserter(*affectedKeysSet, affectedKeysSet->end())); + // copy the keys from the variableIndex_ to the affectedKeysSet + for (const auto& [key, _] : variableIndex_) { + affectedKeysSet->insert(key); + } // Removed unused keys: VariableIndex affectedFactorsVarIndex = variableIndex_; diff --git a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp index 5cbed2a1f..3c0624986 100644 --- a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp +++ b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp @@ -29,7 +29,6 @@ #include #include -#include #include #include @@ -41,7 +40,6 @@ using namespace std; namespace gtsam { -using boost::adaptors::map_values; typedef internal::LevenbergMarquardtState State; /* ************************************************************************* */ @@ -281,8 +279,8 @@ GaussianFactorGraph::shared_ptr LevenbergMarquardtOptimizer::iterate() { VectorValues sqrtHessianDiagonal; if (params_.diagonalDamping) { sqrtHessianDiagonal = linear->hessianDiagonal(); - for (Vector& v : sqrtHessianDiagonal | map_values) { - v = v.cwiseMax(params_.minDiagonal).cwiseMin(params_.maxDiagonal).cwiseSqrt(); + for (auto& [key, value] : sqrtHessianDiagonal) { + value = value.cwiseMax(params_.minDiagonal).cwiseMin(params_.maxDiagonal).cwiseSqrt(); } } diff --git a/gtsam/nonlinear/LevenbergMarquardtParams.cpp b/gtsam/nonlinear/LevenbergMarquardtParams.cpp index caa2cc083..32de3ffbe 100644 --- a/gtsam/nonlinear/LevenbergMarquardtParams.cpp +++ b/gtsam/nonlinear/LevenbergMarquardtParams.cpp @@ -20,7 +20,6 @@ #include #include -#include #include #include diff --git a/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.cpp b/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.cpp index 54db42b79..fbf8062f6 100644 --- a/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.cpp +++ b/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.cpp @@ -65,7 +65,7 @@ NonlinearConjugateGradientOptimizer::System::State NonlinearConjugateGradientOpt GaussianFactorGraph::shared_ptr NonlinearConjugateGradientOptimizer::iterate() { Values newValues; int dummy; - boost::tie(newValues, dummy) = nonlinearConjugateGradient( + std::tie(newValues, dummy) = nonlinearConjugateGradient( System(graph_), state_->values, params_, true /* single 
iteration */); state_.reset(new State(newValues, graph_.error(newValues), state_->iterations + 1)); @@ -78,7 +78,7 @@ const Values& NonlinearConjugateGradientOptimizer::optimize() { System system(graph_); Values newValues; int iterations; - boost::tie(newValues, iterations) = + std::tie(newValues, iterations) = nonlinearConjugateGradient(system, state_->values, params_, false); state_.reset(new State(std::move(newValues), graph_.error(newValues), iterations)); return state_->values; diff --git a/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h b/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h index 780d41635..1480dea55 100644 --- a/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h +++ b/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h @@ -20,7 +20,6 @@ #include #include -#include namespace gtsam { @@ -145,7 +144,7 @@ double lineSearch(const S &system, const V currentValues, const W &gradient) { * The last parameter is a switch between gradient-descent and conjugate gradient */ template -boost::tuple nonlinearConjugateGradient(const S &system, +std::tuple nonlinearConjugateGradient(const S &system, const V &initial, const NonlinearOptimizerParams ¶ms, const bool singleIteration, const bool gradientDescent = false) { @@ -160,7 +159,7 @@ boost::tuple nonlinearConjugateGradient(const S &system, std::cout << "Exiting, as error = " << currentError << " < " << params.errorTol << std::endl; } - return boost::tie(initial, iteration); + return std::tie(initial, iteration); } V currentValues = initial; @@ -218,7 +217,7 @@ boost::tuple nonlinearConjugateGradient(const S &system, << "nonlinearConjugateGradient: Terminating because reached maximum iterations" << std::endl; - return boost::tie(currentValues, iteration); + return std::tie(currentValues, iteration); } } // \ namespace gtsam diff --git a/gtsam/nonlinear/Values.h b/gtsam/nonlinear/Values.h index ba0252c98..be2df9056 100644 --- a/gtsam/nonlinear/Values.h +++ b/gtsam/nonlinear/Values.h @@ -105,8 +105,11 @@ namespace gtsam { typedef KeyValuePair value_type; + /// @name Constructors + /// @{ + /** Default constructor creates an empty Values class */ - Values() {} + Values() = default; /** Copy constructor duplicates all keys and values */ Values(const Values& other); @@ -124,6 +127,7 @@ namespace gtsam { /** Construct from a Values and an update vector: identical to other.retract(delta) */ Values(const Values& other, const VectorValues& delta); + /// @} /// @name Testable /// @{ @@ -134,6 +138,8 @@ namespace gtsam { bool equals(const Values& other, double tol=1e-9) const; /// @} + /// @name Standard Interface + /// @{ /** Retrieve a variable by key \c j. The type of the value associated with * this key is supplied as a template argument to this function. 
@@ -174,6 +180,42 @@ namespace gtsam { /** whether the config is empty */ bool empty() const { return values_.empty(); } + /// @} + /// @name Iterator + /// @{ + + struct deref_iterator { + using const_iterator_type = typename KeyValueMap::const_iterator; + const_iterator_type it_; + deref_iterator(const_iterator_type it) : it_(it) {} + ConstKeyValuePair operator*() const { return {it_->first, *(it_->second)}; } + std::unique_ptr operator->() { + return std::make_unique(it_->first, *(it_->second)); + } + bool operator==(const deref_iterator& other) const { + return it_ == other.it_; + } + bool operator!=(const deref_iterator& other) const { return it_ != other.it_; } + deref_iterator& operator++() { + ++it_; + return *this; + } + }; + + deref_iterator begin() const { return deref_iterator(values_.begin()); } + deref_iterator end() const { return deref_iterator(values_.end()); } + + /** Find an element by key, returning an iterator, or end() if the key was + * not found. */ + deref_iterator find(Key j) const { return deref_iterator(values_.find(j)); } + + /** Find the element greater than or equal to the specified key. */ + deref_iterator lower_bound(Key j) const { return deref_iterator(values_.lower_bound(j)); } + + /** Find the lowest-ordered element greater than the specified key. */ + deref_iterator upper_bound(Key j) const { return deref_iterator(values_.upper_bound(j)); } + + /// @} /// @name Manifold Operations /// @{ diff --git a/gtsam/nonlinear/tests/testValues.cpp b/gtsam/nonlinear/tests/testValues.cpp index 02a3374e4..1ad7277dc 100644 --- a/gtsam/nonlinear/tests/testValues.cpp +++ b/gtsam/nonlinear/tests/testValues.cpp @@ -194,11 +194,31 @@ TEST(Values, basic_functions) values.insert(6, M1); values.insert(8, M2); + EXPECT(!values.exists(1)); EXPECT(values.exists(2)); EXPECT(values.exists(4)); EXPECT(values.exists(6)); EXPECT(values.exists(8)); + + size_t count = 0; + for (const auto& [key, value] : values) { + count += 1; + if (key == 2 || key == 4) EXPECT_LONGS_EQUAL(3, value.dim()); + if (key == 6 || key == 8) EXPECT_LONGS_EQUAL(6, value.dim()); + } + EXPECT_LONGS_EQUAL(4, count); + + // find + EXPECT_LONGS_EQUAL(4, values.find(4)->key); + + // lower_bound + EXPECT_LONGS_EQUAL(4, values.lower_bound(4)->key); + EXPECT_LONGS_EQUAL(4, values.lower_bound(3)->key); + + // upper_bound + EXPECT_LONGS_EQUAL(6, values.upper_bound(4)->key); + EXPECT_LONGS_EQUAL(4, values.upper_bound(3)->key); } /* ************************************************************************* */ diff --git a/gtsam/slam/JacobianFactorQR.h b/gtsam/slam/JacobianFactorQR.h index 5d59f3c62..a10dae1a0 100644 --- a/gtsam/slam/JacobianFactorQR.h +++ b/gtsam/slam/JacobianFactorQR.h @@ -48,7 +48,7 @@ public: GaussianFactorGraph::shared_ptr fg; KeyVector variables; variables.push_back(pointKey); - boost::tie(bn, fg) = gfg.eliminatePartialSequential(variables, EliminateQR); + std::tie(bn, fg) = gfg.eliminatePartialSequential(variables, EliminateQR); //fg->print("fg"); JacobianFactor::operator=(JacobianFactor(*fg)); diff --git a/gtsam/slam/tests/testDataset.cpp b/gtsam/slam/tests/testDataset.cpp index 9413ae4bd..af4d7612f 100644 --- a/gtsam/slam/tests/testDataset.cpp +++ b/gtsam/slam/tests/testDataset.cpp @@ -94,7 +94,7 @@ TEST(dataSet, load2D) { const string filename = findExampleDataFile("w100.graph"); NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; - boost::tie(graph, initial) = load2D(filename); + std::tie(graph, initial) = load2D(filename); EXPECT_LONGS_EQUAL(300, graph->size()); 
EXPECT_LONGS_EQUAL(100, initial->size()); auto model = noiseModel::Unit::Create(3); @@ -139,13 +139,13 @@ TEST(dataSet, load2DVictoriaPark) { Values::shared_ptr initial; // Load all - boost::tie(graph, initial) = load2D(filename); + std::tie(graph, initial) = load2D(filename); EXPECT_LONGS_EQUAL(10608, graph->size()); EXPECT_LONGS_EQUAL(7120, initial->size()); // Restrict keys size_t maxIndex = 5; - boost::tie(graph, initial) = load2D(filename, nullptr, maxIndex); + std::tie(graph, initial) = load2D(filename, nullptr, maxIndex); EXPECT_LONGS_EQUAL(5, graph->size()); EXPECT_LONGS_EQUAL(6, initial->size()); // file has 0 as well EXPECT_LONGS_EQUAL(L(5), graph->at(4)->keys()[1]); @@ -221,7 +221,7 @@ TEST(dataSet, readG2o3D) { NonlinearFactorGraph::shared_ptr actualGraph; Values::shared_ptr actualValues; bool is3D = true; - boost::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D); + std::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D); EXPECT(assert_equal(expectedGraph, *actualGraph, 1e-5)); for (size_t j : {0, 1, 2, 3, 4}) { EXPECT(assert_equal(poses[j], actualValues->at(j), 1e-5)); @@ -235,7 +235,7 @@ TEST( dataSet, readG2o3DNonDiagonalNoise) NonlinearFactorGraph::shared_ptr actualGraph; Values::shared_ptr actualValues; bool is3D = true; - boost::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D); + std::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D); Values expectedValues; Rot3 R0 = Rot3::Quaternion(1.000000, 0.000000, 0.000000, 0.000000 ); @@ -329,7 +329,7 @@ TEST(dataSet, readG2o) { const string g2oFile = findExampleDataFile("pose2example"); NonlinearFactorGraph::shared_ptr actualGraph; Values::shared_ptr actualValues; - boost::tie(actualGraph, actualValues) = readG2o(g2oFile); + std::tie(actualGraph, actualValues) = readG2o(g2oFile); auto model = noiseModel::Diagonal::Precisions( Vector3(44.721360, 44.721360, 30.901699)); @@ -356,7 +356,7 @@ TEST(dataSet, readG2oHuber) { NonlinearFactorGraph::shared_ptr actualGraph; Values::shared_ptr actualValues; bool is3D = false; - boost::tie(actualGraph, actualValues) = + std::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D, KernelFunctionTypeHUBER); auto baseModel = noiseModel::Diagonal::Precisions( @@ -373,7 +373,7 @@ TEST(dataSet, readG2oTukey) { NonlinearFactorGraph::shared_ptr actualGraph; Values::shared_ptr actualValues; bool is3D = false; - boost::tie(actualGraph, actualValues) = + std::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D, KernelFunctionTypeTUKEY); auto baseModel = noiseModel::Diagonal::Precisions( @@ -390,14 +390,14 @@ TEST( dataSet, writeG2o) const string g2oFile = findExampleDataFile("pose2example"); NonlinearFactorGraph::shared_ptr expectedGraph; Values::shared_ptr expectedValues; - boost::tie(expectedGraph, expectedValues) = readG2o(g2oFile); + std::tie(expectedGraph, expectedValues) = readG2o(g2oFile); const string filenameToWrite = createRewrittenFileName(g2oFile); writeG2o(*expectedGraph, *expectedValues, filenameToWrite); NonlinearFactorGraph::shared_ptr actualGraph; Values::shared_ptr actualValues; - boost::tie(actualGraph, actualValues) = readG2o(filenameToWrite); + std::tie(actualGraph, actualValues) = readG2o(filenameToWrite); EXPECT(assert_equal(*expectedValues,*actualValues,1e-5)); EXPECT(assert_equal(*expectedGraph,*actualGraph,1e-5)); } @@ -409,14 +409,14 @@ TEST( dataSet, writeG2o3D) NonlinearFactorGraph::shared_ptr expectedGraph; Values::shared_ptr expectedValues; bool is3D = true; - boost::tie(expectedGraph, expectedValues) = readG2o(g2oFile, is3D); + 
std::tie(expectedGraph, expectedValues) = readG2o(g2oFile, is3D); const string filenameToWrite = createRewrittenFileName(g2oFile); writeG2o(*expectedGraph, *expectedValues, filenameToWrite); NonlinearFactorGraph::shared_ptr actualGraph; Values::shared_ptr actualValues; - boost::tie(actualGraph, actualValues) = readG2o(filenameToWrite, is3D); + std::tie(actualGraph, actualValues) = readG2o(filenameToWrite, is3D); EXPECT(assert_equal(*expectedValues,*actualValues,1e-4)); EXPECT(assert_equal(*expectedGraph,*actualGraph,1e-4)); } @@ -428,14 +428,14 @@ TEST( dataSet, writeG2o3DNonDiagonalNoise) NonlinearFactorGraph::shared_ptr expectedGraph; Values::shared_ptr expectedValues; bool is3D = true; - boost::tie(expectedGraph, expectedValues) = readG2o(g2oFile, is3D); + std::tie(expectedGraph, expectedValues) = readG2o(g2oFile, is3D); const string filenameToWrite = createRewrittenFileName(g2oFile); writeG2o(*expectedGraph, *expectedValues, filenameToWrite); NonlinearFactorGraph::shared_ptr actualGraph; Values::shared_ptr actualValues; - boost::tie(actualGraph, actualValues) = readG2o(filenameToWrite, is3D); + std::tie(actualGraph, actualValues) = readG2o(filenameToWrite, is3D); EXPECT(assert_equal(*expectedValues,*actualValues,1e-4)); EXPECT(assert_equal(*expectedGraph,*actualGraph,1e-4)); } diff --git a/gtsam/slam/tests/testInitializePose.cpp b/gtsam/slam/tests/testInitializePose.cpp index 1373cb576..ec53effc7 100644 --- a/gtsam/slam/tests/testInitializePose.cpp +++ b/gtsam/slam/tests/testInitializePose.cpp @@ -36,7 +36,7 @@ TEST(InitializePose3, computePoses2D) { NonlinearFactorGraph::shared_ptr inputGraph; Values::shared_ptr posesInFile; bool is3D = false; - boost::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D); + std::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D); auto priorModel = noiseModel::Unit::Create(3); inputGraph->addPrior(0, posesInFile->at(0), priorModel); @@ -59,7 +59,7 @@ TEST(InitializePose3, computePoses3D) { NonlinearFactorGraph::shared_ptr inputGraph; Values::shared_ptr posesInFile; bool is3D = true; - boost::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D); + std::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D); auto priorModel = noiseModel::Unit::Create(6); inputGraph->addPrior(0, posesInFile->at(0), priorModel); diff --git a/gtsam/slam/tests/testInitializePose3.cpp b/gtsam/slam/tests/testInitializePose3.cpp index 37d65320a..37997ab0e 100644 --- a/gtsam/slam/tests/testInitializePose3.cpp +++ b/gtsam/slam/tests/testInitializePose3.cpp @@ -234,7 +234,7 @@ TEST( InitializePose3, orientationsGradient ) { NonlinearFactorGraph::shared_ptr matlabGraph; Values::shared_ptr matlabValues; bool is3D = true; - boost::tie(matlabGraph, matlabValues) = readG2o(matlabResultsfile, is3D); + std::tie(matlabGraph, matlabValues) = readG2o(matlabResultsfile, is3D); Rot3 R0Expected = matlabValues->at(1).rotation(); EXPECT(assert_equal(R0Expected, orientations.at(x0), 1e-4)); @@ -269,7 +269,7 @@ TEST(InitializePose3, initializePoses) { NonlinearFactorGraph::shared_ptr inputGraph; Values::shared_ptr posesInFile; bool is3D = true; - boost::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D); + std::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D); auto priorModel = noiseModel::Unit::Create(6); inputGraph->addPrior(0, Pose3(), priorModel); diff --git a/gtsam/slam/tests/testLago.cpp b/gtsam/slam/tests/testLago.cpp index fda39c169..43049994b 100644 --- a/gtsam/slam/tests/testLago.cpp +++ b/gtsam/slam/tests/testLago.cpp @@ -260,7 +260,7 @@ TEST( Lago, 
largeGraphNoisy_orientations ) { string inputFile = findExampleDataFile("noisyToyGraph"); NonlinearFactorGraph::shared_ptr g; Values::shared_ptr initial; - boost::tie(g, initial) = readG2o(inputFile); + std::tie(g, initial) = readG2o(inputFile); // Add prior on the pose having index (key) = 0 NonlinearFactorGraph graphWithPrior = *g; @@ -281,7 +281,7 @@ TEST( Lago, largeGraphNoisy_orientations ) { string matlabFile = findExampleDataFile("orientationsNoisyToyGraph"); NonlinearFactorGraph::shared_ptr gmatlab; Values::shared_ptr expected; - boost::tie(gmatlab, expected) = readG2o(matlabFile); + std::tie(gmatlab, expected) = readG2o(matlabFile); for(const auto& key_pose: expected->extract()){ const Key& k = key_pose.first; @@ -296,7 +296,7 @@ TEST( Lago, largeGraphNoisy ) { string inputFile = findExampleDataFile("noisyToyGraph"); NonlinearFactorGraph::shared_ptr g; Values::shared_ptr initial; - boost::tie(g, initial) = readG2o(inputFile); + std::tie(g, initial) = readG2o(inputFile); // Add prior on the pose having index (key) = 0 NonlinearFactorGraph graphWithPrior = *g; @@ -308,7 +308,7 @@ TEST( Lago, largeGraphNoisy ) { string matlabFile = findExampleDataFile("optimizedNoisyToyGraph"); NonlinearFactorGraph::shared_ptr gmatlab; Values::shared_ptr expected; - boost::tie(gmatlab, expected) = readG2o(matlabFile); + std::tie(gmatlab, expected) = readG2o(matlabFile); for(const auto& key_pose: expected->extract()){ const Key& k = key_pose.first; diff --git a/gtsam/slam/tests/testRegularImplicitSchurFactor.cpp b/gtsam/slam/tests/testRegularImplicitSchurFactor.cpp index 9f39082a5..98809e959 100644 --- a/gtsam/slam/tests/testRegularImplicitSchurFactor.cpp +++ b/gtsam/slam/tests/testRegularImplicitSchurFactor.cpp @@ -27,8 +27,6 @@ #include #include -#include -#include #include using namespace std; diff --git a/gtsam/symbolic/tests/testSymbolicBayesTree.cpp b/gtsam/symbolic/tests/testSymbolicBayesTree.cpp index d14523719..6051fa95f 100644 --- a/gtsam/symbolic/tests/testSymbolicBayesTree.cpp +++ b/gtsam/symbolic/tests/testSymbolicBayesTree.cpp @@ -22,11 +22,10 @@ #include #include -#include -using boost::adaptors::indirected; - #include #include +#include +#include using namespace std; using namespace gtsam; @@ -34,6 +33,24 @@ using namespace gtsam::symbol_shorthand; static bool debug = false; +// Given a vector of shared pointers infer the type of the pointed-to objects +template +using PointedToType = std::decay_t().begin())>; + +// Given a vector of shared pointers infer the type of the pointed-to objects +template +using ValuesVector = std::vector>; + +// Return a vector of dereferenced values +template +ValuesVector deref(const T& v) { + ValuesVector result; + for (auto& t : v) + result.push_back(*t); + return result; +} + + /* ************************************************************************* */ TEST(SymbolicBayesTree, clear) { SymbolicBayesTree bayesTree = asiaBayesTree; @@ -111,8 +128,7 @@ TEST(BayesTree, removePath) { bayesTree.removePath(bayesTree[_C_], &bn, &orphans); SymbolicFactorGraph factors(bn); CHECK(assert_equal(expected, factors)); - CHECK(assert_container_equal(expectedOrphans | indirected, - orphans | indirected)); + CHECK(assert_container_equal(deref(expectedOrphans), deref(orphans))); bayesTree = bayesTreeOrig; @@ -127,8 +143,7 @@ TEST(BayesTree, removePath) { bayesTree.removePath(bayesTree[_E_], &bn2, &orphans2); SymbolicFactorGraph factors2(bn2); CHECK(assert_equal(expected2, factors2)); - CHECK(assert_container_equal(expectedOrphans2 | indirected, - orphans2 | 
indirected)); + CHECK(assert_container_equal(deref(expectedOrphans2), deref(orphans2))); } /* ************************************************************************* */ @@ -147,8 +162,7 @@ TEST(BayesTree, removePath2) { CHECK(assert_equal(expected, factors)); SymbolicBayesTree::Cliques expectedOrphans{bayesTree[_S_], bayesTree[_T_], bayesTree[_X_]}; - CHECK(assert_container_equal(expectedOrphans | indirected, - orphans | indirected)); + CHECK(assert_container_equal(deref(expectedOrphans), deref(orphans))); } /* ************************************************************************* */ @@ -167,8 +181,7 @@ TEST(BayesTree, removePath3) { expected.emplace_shared(_T_, _E_, _L_); CHECK(assert_equal(expected, factors)); SymbolicBayesTree::Cliques expectedOrphans{bayesTree[_S_], bayesTree[_X_]}; - CHECK(assert_container_equal(expectedOrphans | indirected, - orphans | indirected)); + CHECK(assert_container_equal(deref(expectedOrphans), deref(orphans))); } void getAllCliques(const SymbolicBayesTree::sharedClique& subtree, @@ -249,8 +262,7 @@ TEST(BayesTree, removeTop) { CHECK(assert_equal(expected, bn)); SymbolicBayesTree::Cliques expectedOrphans{bayesTree[_T_], bayesTree[_X_]}; - CHECK(assert_container_equal(expectedOrphans | indirected, - orphans | indirected)); + CHECK(assert_container_equal(deref(expectedOrphans), deref(orphans))); // Try removeTop again with a factor that should not change a thing // std::shared_ptr newFactor2(new IndexFactor(_B_)); @@ -261,8 +273,7 @@ TEST(BayesTree, removeTop) { SymbolicFactorGraph expected2; CHECK(assert_equal(expected2, factors2)); SymbolicBayesTree::Cliques expectedOrphans2; - CHECK(assert_container_equal(expectedOrphans2 | indirected, - orphans2 | indirected)); + CHECK(assert_container_equal(deref(expectedOrphans2), deref(orphans2))); } /* ************************************************************************* */ @@ -286,8 +297,7 @@ TEST(BayesTree, removeTop2) { CHECK(assert_equal(expected, bn)); SymbolicBayesTree::Cliques expectedOrphans{bayesTree[_S_], bayesTree[_X_]}; - CHECK(assert_container_equal(expectedOrphans | indirected, - orphans | indirected)); + CHECK(assert_container_equal(deref(expectedOrphans), deref(orphans))); } /* ************************************************************************* */ diff --git a/gtsam/symbolic/tests/testSymbolicFactor.cpp b/gtsam/symbolic/tests/testSymbolicFactor.cpp index 646b80ad6..1263a240a 100644 --- a/gtsam/symbolic/tests/testSymbolicFactor.cpp +++ b/gtsam/symbolic/tests/testSymbolicFactor.cpp @@ -21,7 +21,6 @@ #include #include -#include using namespace std; using namespace gtsam; @@ -77,7 +76,7 @@ TEST(SymbolicFactor, EliminateSymbolic) SymbolicFactor::shared_ptr actualFactor; SymbolicConditional::shared_ptr actualConditional; - boost::tie(actualConditional, actualFactor) = + std::tie(actualConditional, actualFactor) = EliminateSymbolic(factors, Ordering{0, 1, 2, 3}); CHECK(assert_equal(expectedConditional, *actualConditional)); diff --git a/gtsam/symbolic/tests/testSymbolicFactorGraph.cpp b/gtsam/symbolic/tests/testSymbolicFactorGraph.cpp index 6c8924be4..96d22c8c4 100644 --- a/gtsam/symbolic/tests/testSymbolicFactorGraph.cpp +++ b/gtsam/symbolic/tests/testSymbolicFactorGraph.cpp @@ -67,7 +67,7 @@ TEST(SymbolicFactorGraph, eliminatePartialSequential) { SymbolicBayesNet::shared_ptr actualBayesNet; SymbolicFactorGraph::shared_ptr actualSfg; - boost::tie(actualBayesNet, actualSfg) = + std::tie(actualBayesNet, actualSfg) = simpleTestGraph2.eliminatePartialSequential(Ordering{0, 1}); 
EXPECT(assert_equal(expectedSfg, *actualSfg)); @@ -75,7 +75,7 @@ TEST(SymbolicFactorGraph, eliminatePartialSequential) { SymbolicBayesNet::shared_ptr actualBayesNet2; SymbolicFactorGraph::shared_ptr actualSfg2; - boost::tie(actualBayesNet2, actualSfg2) = + std::tie(actualBayesNet2, actualSfg2) = simpleTestGraph2.eliminatePartialSequential(Ordering{0, 1}); EXPECT(assert_equal(expectedSfg, *actualSfg2)); @@ -108,7 +108,7 @@ TEST(SymbolicFactorGraph, eliminatePartialMultifrontal) { SymbolicBayesTree::shared_ptr actualBayesTree; SymbolicFactorGraph::shared_ptr actualFactorGraph; - boost::tie(actualBayesTree, actualFactorGraph) = + std::tie(actualBayesTree, actualFactorGraph) = simpleTestGraph2.eliminatePartialMultifrontal(Ordering{4, 5}); EXPECT(assert_equal(expectedFactorGraph, *actualFactorGraph)); @@ -124,7 +124,7 @@ TEST(SymbolicFactorGraph, eliminatePartialMultifrontal) { SymbolicBayesTree::shared_ptr actualBayesTree2; SymbolicFactorGraph::shared_ptr actualFactorGraph2; - boost::tie(actualBayesTree2, actualFactorGraph2) = + std::tie(actualBayesTree2, actualFactorGraph2) = simpleTestGraph2.eliminatePartialMultifrontal(KeyVector{4, 5}); EXPECT(assert_equal(expectedFactorGraph, *actualFactorGraph2)); diff --git a/gtsam_unstable/discrete/tests/testLoopyBelief.cpp b/gtsam_unstable/discrete/tests/testLoopyBelief.cpp index 63df7d7c4..440593e7d 100644 --- a/gtsam_unstable/discrete/tests/testLoopyBelief.cpp +++ b/gtsam_unstable/discrete/tests/testLoopyBelief.cpp @@ -11,7 +11,6 @@ #include #include -#include #include #include @@ -47,7 +46,7 @@ class LoopyBelief { void print(const std::string& s = "") const { cout << s << ":" << endl; star->print("Star graph: "); - for (Key key : correctedBeliefIndices | boost::adaptors::map_keys) { + for (const auto& [key, _] : correctedBeliefIndices) { cout << "Belief factor index for " << key << ": " << correctedBeliefIndices.at(key) << endl; } @@ -71,7 +70,7 @@ class LoopyBelief { /// print void print(const std::string& s = "") const { cout << s << ":" << endl; - for (Key key : starGraphs_ | boost::adaptors::map_keys) { + for (const auto& [key, _] : starGraphs_) { starGraphs_.at(key).print((boost::format("Node %d:") % key).str()); } } @@ -85,7 +84,7 @@ class LoopyBelief { DiscreteFactorGraph::shared_ptr beliefs(new DiscreteFactorGraph()); std::map > allMessages; // Eliminate each star graph - for (Key key : starGraphs_ | boost::adaptors::map_keys) { + for (const auto& [key, _] : starGraphs_) { // cout << "***** Node " << key << "*****" << endl; // initialize belief to the unary factor from the original graph DecisionTreeFactor::shared_ptr beliefAtKey; @@ -94,15 +93,14 @@ class LoopyBelief { std::map messages; // eliminate each neighbor in this star graph one by one - for (Key neighbor : starGraphs_.at(key).correctedBeliefIndices | - boost::adaptors::map_keys) { + for (const auto& [neighbor, _] : starGraphs_.at(key).correctedBeliefIndices) { DiscreteFactorGraph subGraph; for (size_t factor : starGraphs_.at(key).varIndex_[neighbor]) { subGraph.push_back(starGraphs_.at(key).star->at(factor)); } if (debug) subGraph.print("------- Subgraph:"); DiscreteFactor::shared_ptr message; - boost::tie(dummyCond, message) = + std::tie(dummyCond, message) = EliminateDiscrete(subGraph, Ordering{neighbor}); // store the new factor into messages messages.insert(make_pair(neighbor, message)); @@ -143,11 +141,10 @@ class LoopyBelief { // Update corrected beliefs VariableIndex beliefFactors(*beliefs); - for (Key key : starGraphs_ | boost::adaptors::map_keys) { + for (const auto& [key, 
_] : starGraphs_) { std::map messages = allMessages[key]; - for (Key neighbor : starGraphs_.at(key).correctedBeliefIndices | - boost::adaptors::map_keys) { - DecisionTreeFactor correctedBelief = + for (const auto& [neighbor, _] : starGraphs_.at(key).correctedBeliefIndices) { + DecisionTreeFactor correctedBelief = (*std::dynamic_pointer_cast( beliefs->at(beliefFactors[key].front()))) / (*std::dynamic_pointer_cast( @@ -175,7 +172,7 @@ class LoopyBelief { const std::map& allDiscreteKeys) const { StarGraphs starGraphs; VariableIndex varIndex(graph); ///< access to all factors of each node - for (Key key : varIndex | boost::adaptors::map_keys) { + for (const auto& [key, _] : varIndex) { // initialize to multiply with other unary factors later DecisionTreeFactor::shared_ptr prodOfUnaries; diff --git a/gtsam_unstable/examples/SmartRangeExample_plaza1.cpp b/gtsam_unstable/examples/SmartRangeExample_plaza1.cpp index 37becfef1..0e2822e60 100644 --- a/gtsam_unstable/examples/SmartRangeExample_plaza1.cpp +++ b/gtsam_unstable/examples/SmartRangeExample_plaza1.cpp @@ -77,7 +77,7 @@ list readOdometry() { // load the ranges from TD // Time (sec) Sender / Antenna ID Receiver Node ID Range (m) -typedef boost::tuple RangeTriple; +typedef std::tuple RangeTriple; vector readTriples() { vector triples; string tdFile = findExampleDataFile("Plaza1_TD.txt"); @@ -165,7 +165,7 @@ int main(int argc, char** argv) { //--------------------------------- odometry loop ----------------------------------------- double t; Pose2 odometry; - boost::tie(t, odometry) = timedOdometry; + std::tie(t, odometry) = timedOdometry; printf("step %d, time = %g\n",(int)i,t); // add odometry factor @@ -179,9 +179,9 @@ int main(int argc, char** argv) { landmarkEstimates.insert(i, predictedPose); // Check if there are range factors to be added - while (k < K && t >= boost::get<0>(triples[k])) { - size_t j = boost::get<1>(triples[k]); - double range = boost::get<2>(triples[k]); + while (k < K && t >= std::get<0>(triples[k])) { + size_t j = std::get<1>(triples[k]); + double range = std::get<2>(triples[k]); if (i > start) { if (smart && totalCount < minK) { try { diff --git a/gtsam_unstable/examples/SmartRangeExample_plaza2.cpp b/gtsam_unstable/examples/SmartRangeExample_plaza2.cpp index 1e6f77b31..9a2863d68 100644 --- a/gtsam_unstable/examples/SmartRangeExample_plaza2.cpp +++ b/gtsam_unstable/examples/SmartRangeExample_plaza2.cpp @@ -76,7 +76,7 @@ list readOdometry() { // load the ranges from TD // Time (sec) Sender / Antenna ID Receiver Node ID Range (m) -typedef boost::tuple RangeTriple; +typedef std::tuple RangeTriple; vector readTriples() { vector triples; string tdFile = findExampleDataFile("Plaza2_TD.txt"); @@ -146,7 +146,7 @@ int main(int argc, char** argv) { //--------------------------------- odometry loop ----------------------------------------- double t; Pose2 odometry; - boost::tie(t, odometry) = timedOdometry; + std::tie(t, odometry) = timedOdometry; // add odometry factor newFactors.push_back( @@ -160,9 +160,9 @@ int main(int argc, char** argv) { landmarkEstimates.insert(i, predictedPose); // Check if there are range factors to be added - while (k < K && t >= boost::get<0>(triples[k])) { - size_t j = boost::get<1>(triples[k]); - double range = boost::get<2>(triples[k]); + while (k < K && t >= std::get<0>(triples[k])) { + size_t j = std::get<1>(triples[k]); + double range = std::get<2>(triples[k]); RangeFactor factor(i, symbol('L', j), range, rangeNoise); // Throw out obvious outliers based on current landmark estimates Vector 
error = factor.unwhitenedError(landmarkEstimates); diff --git a/gtsam_unstable/geometry/tests/testInvDepthCamera3.cpp b/gtsam_unstable/geometry/tests/testInvDepthCamera3.cpp index 11a24286a..9c314b0f0 100644 --- a/gtsam_unstable/geometry/tests/testInvDepthCamera3.cpp +++ b/gtsam_unstable/geometry/tests/testInvDepthCamera3.cpp @@ -132,7 +132,7 @@ TEST(InvDepthFactor, backproject) Vector5 actual_vec; double actual_inv; - boost::tie(actual_vec, actual_inv) = inv_camera.backproject(z, 4); + std::tie(actual_vec, actual_inv) = inv_camera.backproject(z, 4); EXPECT(assert_equal(expected,actual_vec,1e-7)); EXPECT_DOUBLES_EQUAL(inv_depth,actual_inv,1e-7); } @@ -148,7 +148,7 @@ TEST(InvDepthFactor, backproject2) Vector5 actual_vec; double actual_inv; - boost::tie(actual_vec, actual_inv) = inv_camera.backproject(z, 10); + std::tie(actual_vec, actual_inv) = inv_camera.backproject(z, 10); EXPECT(assert_equal(expected,actual_vec,1e-7)); EXPECT_DOUBLES_EQUAL(inv_depth,actual_inv,1e-7); } diff --git a/gtsam_unstable/linear/ActiveSetSolver-inl.h b/gtsam_unstable/linear/ActiveSetSolver-inl.h index cba639e5d..eeb3cd041 100644 --- a/gtsam_unstable/linear/ActiveSetSolver-inl.h +++ b/gtsam_unstable/linear/ActiveSetSolver-inl.h @@ -45,7 +45,7 @@ namespace gtsam { * * We want the minimum of all those alphas among all inactive inequality. */ -Template boost::tuple This::computeStepSize( +Template std::tuple This::computeStepSize( const InequalityFactorGraph& workingSet, const VectorValues& xk, const VectorValues& p, const double& maxAlpha) const { double minAlpha = maxAlpha; @@ -74,7 +74,7 @@ Template boost::tuple This::computeStepSize( } } } - return boost::make_tuple(minAlpha, closestFactorIx); + return std::make_tuple(minAlpha, closestFactorIx); } /******************************************************************************/ @@ -222,7 +222,7 @@ Template typename This::State This::iterate( double alpha; int factorIx; VectorValues p = newValues - state.values; - boost::tie(alpha, factorIx) = // using 16.41 + std::tie(alpha, factorIx) = // using 16.41 computeStepSize(state.workingSet, state.values, p, POLICY::maxAlpha); // also add to the working set the one that complains the most InequalityFactorGraph newWorkingSet = state.workingSet; diff --git a/gtsam_unstable/linear/ActiveSetSolver.h b/gtsam_unstable/linear/ActiveSetSolver.h index ad8367c06..52a6b1265 100644 --- a/gtsam_unstable/linear/ActiveSetSolver.h +++ b/gtsam_unstable/linear/ActiveSetSolver.h @@ -20,7 +20,6 @@ #include #include -#include namespace gtsam { @@ -114,7 +113,7 @@ protected: * If there is a blocking constraint, the closest one will be added to the * working set and become active in the next iteration. 
*/ - boost::tuple computeStepSize( + std::tuple computeStepSize( const InequalityFactorGraph& workingSet, const VectorValues& xk, const VectorValues& p, const double& maxAlpha) const; @@ -201,4 +200,4 @@ Key maxKey(const PROBLEM& problem) { } // namespace gtsam -#include \ No newline at end of file +#include diff --git a/gtsam_unstable/linear/LPInitSolver.cpp b/gtsam_unstable/linear/LPInitSolver.cpp index 8c3df3132..9f12d670e 100644 --- a/gtsam_unstable/linear/LPInitSolver.cpp +++ b/gtsam_unstable/linear/LPInitSolver.cpp @@ -66,7 +66,7 @@ GaussianFactorGraph::shared_ptr LPInitSolver::buildInitOfInitGraph() const { // create factor ||x||^2 and add to the graph const KeyDimMap& constrainedKeyDim = lp_.constrainedKeyDimMap(); - for (Key key : constrainedKeyDim | boost::adaptors::map_keys) { + for (const auto& [key, _] : constrainedKeyDim) { size_t dim = constrainedKeyDim.at(key); initGraph->push_back( JacobianFactor(key, Matrix::Identity(dim, dim), Vector::Zero(dim))); @@ -107,4 +107,4 @@ InequalityFactorGraph LPInitSolver::addSlackVariableToInequalities( return slackInequalities; } -} \ No newline at end of file +} diff --git a/gtsam_unstable/linear/tests/testLPSolver.cpp b/gtsam_unstable/linear/tests/testLPSolver.cpp index 53c8c7618..7ef26a1c9 100644 --- a/gtsam_unstable/linear/tests/testLPSolver.cpp +++ b/gtsam_unstable/linear/tests/testLPSolver.cpp @@ -30,9 +30,6 @@ #include -#include -#include - using namespace std; using namespace gtsam; using namespace gtsam::symbol_shorthand; @@ -73,7 +70,7 @@ TEST(LPInitSolver, InfiniteLoopSingleVar) { VectorValues starter; starter.insert(1, Vector3(0, 0, 2)); VectorValues results, duals; - boost::tie(results, duals) = solver.optimize(starter); + std::tie(results, duals) = solver.optimize(starter); VectorValues expected; expected.insert(1, Vector3(13.5, 6.5, -6.5)); CHECK(assert_equal(results, expected, 1e-7)); @@ -101,7 +98,7 @@ TEST(LPInitSolver, InfiniteLoopMultiVar) { starter.insert(Y, kZero); starter.insert(Z, Vector::Constant(1, 2.0)); VectorValues results, duals; - boost::tie(results, duals) = solver.optimize(starter); + std::tie(results, duals) = solver.optimize(starter); VectorValues expected; expected.insert(X, Vector::Constant(1, 13.5)); expected.insert(Y, Vector::Constant(1, 6.5)); @@ -201,7 +198,7 @@ TEST(LPSolver, SimpleTest1) { CHECK(assert_equal(expected_x1, x1, 1e-10)); VectorValues result, duals; - boost::tie(result, duals) = lpSolver.optimize(init); + std::tie(result, duals) = lpSolver.optimize(init); VectorValues expectedResult; expectedResult.insert(1, Vector2(8. / 3., 2. / 3.)); CHECK(assert_equal(expectedResult, result, 1e-10)); @@ -213,7 +210,7 @@ TEST(LPSolver, TestWithoutInitialValues) { LPSolver lpSolver(lp); VectorValues result, duals, expectedResult; expectedResult.insert(1, Vector2(8. / 3., 2. 
/ 3.)); - boost::tie(result, duals) = lpSolver.optimize(); + std::tie(result, duals) = lpSolver.optimize(); CHECK(assert_equal(expectedResult, result)); } diff --git a/gtsam_unstable/linear/tests/testLinearEquality.cpp b/gtsam_unstable/linear/tests/testLinearEquality.cpp index 58af66a09..781ae26c1 100644 --- a/gtsam_unstable/linear/tests/testLinearEquality.cpp +++ b/gtsam_unstable/linear/tests/testLinearEquality.cpp @@ -29,8 +29,9 @@ GTSAM_CONCEPT_TESTABLE_INST(LinearEquality) namespace { namespace simple { // Terms we'll use -const vector > terms{ - make_pair(5, I_3x3), make_pair(10, 2 * I_3x3), make_pair(15, 3 * I_3x3)}; +using Terms = vector >; +const Terms terms{make_pair(5, I_3x3), make_pair(10, 2 * I_3x3), + make_pair(15, 3 * I_3x3)}; // RHS and sigmas const Vector b = (Vector(3) << 1., 2., 3.).finished(); @@ -45,8 +46,7 @@ TEST(LinearEquality, constructors_and_accessors) { // Test for using different numbers of terms { // One term constructor - LinearEquality expected( - boost::make_iterator_range(terms.begin(), terms.begin() + 1), b, 0); + LinearEquality expected(Terms(terms.begin(), terms.begin() + 1), b, 0); LinearEquality actual(terms[0].first, terms[0].second, b, 0); EXPECT(assert_equal(expected, actual)); LONGS_EQUAL((long)terms[0].first, (long)actual.keys().back()); @@ -57,8 +57,7 @@ TEST(LinearEquality, constructors_and_accessors) { } { // Two term constructor - LinearEquality expected( - boost::make_iterator_range(terms.begin(), terms.begin() + 2), b, 0); + LinearEquality expected(Terms(terms.begin(), terms.begin() + 2), b, 0); LinearEquality actual(terms[0].first, terms[0].second, terms[1].first, terms[1].second, b, 0); EXPECT(assert_equal(expected, actual)); @@ -70,8 +69,7 @@ TEST(LinearEquality, constructors_and_accessors) { } { // Three term constructor - LinearEquality expected( - boost::make_iterator_range(terms.begin(), terms.begin() + 3), b, 0); + LinearEquality expected(Terms(terms.begin(), terms.begin() + 3), b, 0); LinearEquality actual(terms[0].first, terms[0].second, terms[1].first, terms[1].second, terms[2].first, terms[2].second, b, 0); @@ -204,7 +202,7 @@ TEST(LinearEquality, operators) { // test gradient at zero Matrix A; Vector b2; - boost::tie(A, b2) = lf.jacobian(); + std::tie(A, b2) = lf.jacobian(); VectorValues expectedG; expectedG.insert(1, (Vector(2) << 0.2, -0.1).finished()); expectedG.insert(2, (Vector(2) << -0.2, 0.1).finished()); diff --git a/gtsam_unstable/partition/FindSeparator-inl.h b/gtsam_unstable/partition/FindSeparator-inl.h index 45868e78e..d5883aff3 100644 --- a/gtsam_unstable/partition/FindSeparator-inl.h +++ b/gtsam_unstable/partition/FindSeparator-inl.h @@ -14,7 +14,6 @@ #include #include #include -#include #include #include @@ -252,7 +251,7 @@ namespace gtsam { namespace partition { // run ND on the graph size_t sepsize; sharedInts part; - boost::tie(sepsize, part) = separatorMetis(numKeys, xadj, adjncy, adjwgt, verbose); + std::tie(sepsize, part) = separatorMetis(numKeys, xadj, adjncy, adjwgt, verbose); if (!sepsize) return std::optional(); // convert the 0-1-2 from Metis to 1-2-0, so that the separator is 0, as later @@ -312,7 +311,7 @@ namespace gtsam { namespace partition { // run metis on the graph int edgecut; sharedInts part; - boost::tie(edgecut, part) = edgeMetis(numKeys, xadj, adjncy, adjwgt, verbose); + std::tie(edgecut, part) = edgeMetis(numKeys, xadj, adjncy, adjwgt, verbose); // convert the 0-1-2 from Metis to 1-2-0, so that the separator is 0, as later we will have more submaps MetisResult result; diff --git 
a/gtsam_unstable/partition/NestedDissection-inl.h b/gtsam_unstable/partition/NestedDissection-inl.h index c377998dd..7dbf45cdc 100644 --- a/gtsam_unstable/partition/NestedDissection-inl.h +++ b/gtsam_unstable/partition/NestedDissection-inl.h @@ -22,7 +22,7 @@ namespace gtsam { namespace partition { fg_(fg), ordering_(ordering){ GenericUnaryGraph unaryFactors; GenericGraph gfg; - boost::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering); + std::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering); // build reverse mapping from integer to symbol int numNodes = ordering.size(); @@ -46,7 +46,7 @@ namespace gtsam { namespace partition { const NLG& fg, const Ordering& ordering, const std::shared_ptr& cuts, const bool verbose) : fg_(fg), ordering_(ordering){ GenericUnaryGraph unaryFactors; GenericGraph gfg; - boost::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering); + std::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering); // build reverse mapping from integer to symbol int numNodes = ordering.size(); diff --git a/gtsam_unstable/slam/tests/testAHRS.cpp b/gtsam_unstable/slam/tests/testAHRS.cpp index d0980f452..a41732f57 100644 --- a/gtsam_unstable/slam/tests/testAHRS.cpp +++ b/gtsam_unstable/slam/tests/testAHRS.cpp @@ -74,7 +74,7 @@ TEST (AHRS, Mechanization_integrate) { AHRS ahrs = AHRS(stationaryU,stationaryF,g_e); Mechanization_bRn2 mech; KalmanFilter::State state; -// boost::tie(mech,state) = ahrs.initialize(g_e); +// std::tie(mech,state) = ahrs.initialize(g_e); // Vector u = Vector3(0.05,0.0,0.0); // double dt = 2; // Rot3 expected; diff --git a/tests/testExpressionFactor.cpp b/tests/testExpressionFactor.cpp index 567e9df5d..cc0ca5d4e 100644 --- a/tests/testExpressionFactor.cpp +++ b/tests/testExpressionFactor.cpp @@ -235,7 +235,7 @@ TEST(ExpressionFactor, Shallow) { // Get and check keys and dims KeyVector keys; FastVector dims; - boost::tie(keys, dims) = expression.keysAndDims(); + std::tie(keys, dims) = expression.keysAndDims(); LONGS_EQUAL(2,keys.size()); LONGS_EQUAL(2,dims.size()); LONGS_EQUAL(1,keys[0]); diff --git a/tests/testGaussianFactorGraphB.cpp b/tests/testGaussianFactorGraphB.cpp index a6aa7bfc5..0d8cad53d 100644 --- a/tests/testGaussianFactorGraphB.cpp +++ b/tests/testGaussianFactorGraphB.cpp @@ -26,10 +26,6 @@ #include -#include -#include -namespace br { using namespace boost::range; using namespace boost::adaptors; } - #include #include @@ -117,7 +113,7 @@ TEST(GaussianFactorGraph, eliminateOne_x1_fast) { GaussianFactorGraph fg = createGaussianFactorGraph(); GaussianConditional::shared_ptr conditional; JacobianFactor::shared_ptr remaining; - boost::tie(conditional, remaining) = EliminateQR(fg, Ordering{X(1)}); + std::tie(conditional, remaining) = EliminateQR(fg, Ordering{X(1)}); // create expected Conditional Gaussian Matrix I = 15 * I_2x2, R11 = I, S12 = -0.111111 * I, S13 = -0.444444 * I; @@ -295,7 +291,7 @@ TEST(GaussianFactorGraph, elimination) { // Check matrix Matrix R; Vector d; - boost::tie(R, d) = bayesNet.matrix(); + std::tie(R, d) = bayesNet.matrix(); Matrix expected = (Matrix(2, 2) << 0.707107, -0.353553, 0.0, 0.612372).finished(); Matrix expected2 = @@ -450,7 +446,7 @@ TEST( GaussianFactorGraph, conditional_sigma_failure) { GaussianBayesTree actBT = *lfg.eliminateMultifrontal(); // Check that all sigmas in an unconstrained bayes tree are set to one - for(const GaussianBayesTree::sharedClique& clique: actBT.nodes() | br::map_values) { + for (const auto& [key, clique]: actBT.nodes()) { GaussianConditional::shared_ptr conditional = 
clique->conditional(); //size_t dim = conditional->rows(); //EXPECT(assert_equal(gtsam::Vector::Ones(dim), conditional->get_model()->sigmas(), tol)); diff --git a/tests/testGaussianISAM.cpp b/tests/testGaussianISAM.cpp index 63cf654e5..053dedb93 100644 --- a/tests/testGaussianISAM.cpp +++ b/tests/testGaussianISAM.cpp @@ -22,9 +22,6 @@ #include #include -#include -namespace br { using namespace boost::adaptors; using namespace boost::range; } - using namespace std; using namespace gtsam; using namespace example; @@ -53,7 +50,7 @@ TEST( ISAM, iSAM_smoother ) GaussianBayesTree expected = *smoother.eliminateMultifrontal(ordering); // Verify sigmas in the bayes tree - for(const GaussianBayesTree::sharedClique& clique: expected.nodes() | br::map_values) { + for (const auto& [key, clique] : expected.nodes()) { GaussianConditional::shared_ptr conditional = clique->conditional(); EXPECT(!conditional->get_model()); } diff --git a/tests/testGaussianISAM2.cpp b/tests/testGaussianISAM2.cpp index 8c4238e2f..8edb29d4d 100644 --- a/tests/testGaussianISAM2.cpp +++ b/tests/testGaussianISAM2.cpp @@ -24,8 +24,6 @@ #include -#include -namespace br { using namespace boost::adaptors; using namespace boost::range; } using namespace std; using namespace gtsam; @@ -249,7 +247,7 @@ bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, c // Check gradient at each node bool nodeGradientsOk = true; typedef ISAM2::sharedClique sharedClique; - for(const sharedClique& clique: isam.nodes() | br::map_values) { + for (const auto& [key, clique] : isam.nodes()) { // Compute expected gradient GaussianFactorGraph jfg; jfg += clique->conditional(); @@ -453,7 +451,7 @@ TEST(ISAM2, swapFactors) // Check gradient at each node typedef ISAM2::sharedClique sharedClique; - for(const sharedClique& clique: isam.nodes() | br::map_values) { + for (const auto& [key, clique]: isam.nodes()) { // Compute expected gradient GaussianFactorGraph jfg; jfg += clique->conditional(); @@ -578,7 +576,7 @@ TEST(ISAM2, constrained_ordering) // Check gradient at each node typedef ISAM2::sharedClique sharedClique; - for(const sharedClique& clique: isam.nodes() | br::map_values) { + for (const auto& [key, clique]: isam.nodes()) { // Compute expected gradient GaussianFactorGraph jfg; jfg += clique->conditional(); @@ -620,9 +618,11 @@ namespace { bool checkMarginalizeLeaves(ISAM2& isam, const FastList& leafKeys) { Matrix expectedAugmentedHessian, expected3AugmentedHessian; KeyVector toKeep; - for(Key j: isam.getDelta() | br::map_keys) - if(find(leafKeys.begin(), leafKeys.end(), j) == leafKeys.end()) - toKeep.push_back(j); + for (const auto& [key, clique]: isam.getDelta()) { + if(find(leafKeys.begin(), leafKeys.end(), key) == leafKeys.end()) { + toKeep.push_back(key); + } + } // Calculate expected marginal from iSAM2 tree expectedAugmentedHessian = GaussianFactorGraph(isam).marginal(toKeep, EliminateQR)->augmentedHessian(); diff --git a/tests/testGaussianJunctionTreeB.cpp b/tests/testGaussianJunctionTreeB.cpp index 2740babd0..a19eb17df 100644 --- a/tests/testGaussianJunctionTreeB.cpp +++ b/tests/testGaussianJunctionTreeB.cpp @@ -67,7 +67,7 @@ TEST( GaussianJunctionTreeB, constructor2 ) { // create a graph NonlinearFactorGraph nlfg; Values values; - boost::tie(nlfg, values) = createNonlinearSmoother(7); + std::tie(nlfg, values) = createNonlinearSmoother(7); SymbolicFactorGraph::shared_ptr symbolic = nlfg.symbolic(); // linearize @@ -130,7 +130,7 @@ TEST( GaussianJunctionTreeB, constructor2 ) { // // create a graph // GaussianFactorGraph 
fg; // Ordering ordering; -// boost::tie(fg,ordering) = createSmoother(7); +// std::tie(fg,ordering) = createSmoother(7); // // // optimize the graph // GaussianJunctionTree tree(fg); diff --git a/tests/testGncOptimizer.cpp b/tests/testGncOptimizer.cpp index 15d660114..5670c55b0 100644 --- a/tests/testGncOptimizer.cpp +++ b/tests/testGncOptimizer.cpp @@ -739,7 +739,7 @@ TEST(GncOptimizer, optimizeSmallPoseGraph) { const string filename = findExampleDataFile("w100.graph"); NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; - boost::tie(graph, initial) = load2D(filename); + std::tie(graph, initial) = load2D(filename); // Add a Gaussian prior on first poses Pose2 priorMean(0.0, 0.0, 0.0); // prior at origin SharedDiagonal priorNoise = noiseModel::Diagonal::Sigmas( diff --git a/tests/testGradientDescentOptimizer.cpp b/tests/testGradientDescentOptimizer.cpp index 83b0f9e8a..731e51f36 100644 --- a/tests/testGradientDescentOptimizer.cpp +++ b/tests/testGradientDescentOptimizer.cpp @@ -20,13 +20,12 @@ #include -#include using namespace std; using namespace gtsam; // Generate a small PoseSLAM problem -boost::tuple generateProblem() { +std::tuple generateProblem() { // 1. Create graph container and add factors to it NonlinearFactorGraph graph; @@ -64,7 +63,7 @@ boost::tuple generateProblem() { Pose2 x5(2.1, 2.1, -M_PI_2); initialEstimate.insert(5, x5); - return boost::tie(graph, initialEstimate); + return std::tie(graph, initialEstimate); } /* ************************************************************************* */ @@ -73,7 +72,7 @@ TEST(NonlinearConjugateGradientOptimizer, Optimize) { NonlinearFactorGraph graph; Values initialEstimate; - boost::tie(graph, initialEstimate) = generateProblem(); + std::tie(graph, initialEstimate) = generateProblem(); // cout << "initial error = " << graph.error(initialEstimate) << endl; NonlinearOptimizerParams param; diff --git a/tests/testGraph.cpp b/tests/testGraph.cpp index 0d2a25fbd..92b2c56c9 100644 --- a/tests/testGraph.cpp +++ b/tests/testGraph.cpp @@ -68,7 +68,7 @@ TEST( Graph, predecessorMap2Graph ) p_map.insert(1, 2); p_map.insert(2, 2); p_map.insert(3, 2); - boost::tie(graph, root, key2vertex) = predecessorMap2Graph, SVertex, Key>(p_map); + std::tie(graph, root, key2vertex) = predecessorMap2Graph, SVertex, Key>(p_map); LONGS_EQUAL(3, (long)boost::num_vertices(graph)); CHECK(root == key2vertex[2]); @@ -174,7 +174,7 @@ TEST( GaussianFactorGraph, findMinimumSpanningTree ) // G.push_factor("x3", "x4"); // // SymbolicFactorGraph T, C; -// boost::tie(T, C) = G.splitMinimumSpanningTree(); +// std::tie(T, C) = G.splitMinimumSpanningTree(); // // SymbolicFactorGraph expectedT, expectedC; // expectedT.push_factor("x1", "x2"); @@ -207,7 +207,7 @@ TEST( GaussianFactorGraph, findMinimumSpanningTree ) // // SymbolicFactorGraph singletonGraph; // set singletons; -// boost::tie(singletonGraph, singletons) = G.removeSingletons(); +// std::tie(singletonGraph, singletons) = G.removeSingletons(); // // set singletons_excepted; singletons_excepted += "x1", "x2", "x5", "l1", "l3"; // CHECK(singletons_excepted == singletons); diff --git a/tests/testIterative.cpp b/tests/testIterative.cpp index 5519a7f04..5f981e2dd 100644 --- a/tests/testIterative.cpp +++ b/tests/testIterative.cpp @@ -62,7 +62,7 @@ TEST( Iterative, conjugateGradientDescent ) Matrix A; Vector b; Vector x0 = Z_6x1; - boost::tie(A, b) = fg.jacobian(); + std::tie(A, b) = fg.jacobian(); Vector expectedX = (Vector(6) << -0.1, 0.1, -0.1, -0.1, 0.1, -0.2).finished(); // Do conjugate gradient descent, 
System version diff --git a/tests/testNonlinearOptimizer.cpp b/tests/testNonlinearOptimizer.cpp index c15de4f5d..b73ac7376 100644 --- a/tests/testNonlinearOptimizer.cpp +++ b/tests/testNonlinearOptimizer.cpp @@ -31,8 +31,6 @@ #include -#include -using boost::adaptors::map_values; #include #include @@ -302,7 +300,9 @@ TEST_UNSAFE(NonlinearOptimizer, MoreOptimization) { GaussianFactorGraph::shared_ptr linear = optimizer.linearize(); VectorValues d = linear->hessianDiagonal(); VectorValues sqrtHessianDiagonal = d; - for (Vector& v : sqrtHessianDiagonal | map_values) v = v.cwiseSqrt(); + for (auto& [key, value] : sqrtHessianDiagonal) { + value = value.cwiseSqrt(); + } GaussianFactorGraph damped = optimizer.buildDampedSystem(*linear, sqrtHessianDiagonal); VectorValues expectedDiagonal = d + params.lambdaInitial * d; EXPECT(assert_equal(expectedDiagonal, damped.hessianDiagonal())); diff --git a/tests/testSubgraphPreconditioner.cpp b/tests/testSubgraphPreconditioner.cpp index 8abd54795..155795939 100644 --- a/tests/testSubgraphPreconditioner.cpp +++ b/tests/testSubgraphPreconditioner.cpp @@ -29,9 +29,6 @@ #include -#include -#include - #include using namespace std; @@ -67,7 +64,7 @@ TEST(SubgraphPreconditioner, planarGraph) { // Check planar graph construction GaussianFactorGraph A; VectorValues xtrue; - boost::tie(A, xtrue) = planarGraph(3); + std::tie(A, xtrue) = planarGraph(3); LONGS_EQUAL(13, A.size()); LONGS_EQUAL(9, xtrue.size()); DOUBLES_EQUAL(0, error(A, xtrue), 1e-9); // check zero error for xtrue @@ -83,11 +80,11 @@ TEST(SubgraphPreconditioner, splitOffPlanarTree) { // Build a planar graph GaussianFactorGraph A; VectorValues xtrue; - boost::tie(A, xtrue) = planarGraph(3); + std::tie(A, xtrue) = planarGraph(3); // Get the spanning tree and constraints, and check their sizes GaussianFactorGraph T, C; - boost::tie(T, C) = splitOffPlanarTree(3, A); + std::tie(T, C) = splitOffPlanarTree(3, A); LONGS_EQUAL(9, T.size()); LONGS_EQUAL(4, C.size()); @@ -103,11 +100,11 @@ TEST(SubgraphPreconditioner, system) { GaussianFactorGraph Ab; VectorValues xtrue; size_t N = 3; - boost::tie(Ab, xtrue) = planarGraph(N); // A*x-b + std::tie(Ab, xtrue) = planarGraph(N); // A*x-b // Get the spanning tree and remaining graph GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2 - boost::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab); + std::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab); // Eliminate the spanning tree to build a prior const Ordering ord = planarOrdering(N); @@ -199,11 +196,11 @@ TEST(SubgraphPreconditioner, conjugateGradients) { GaussianFactorGraph Ab; VectorValues xtrue; size_t N = 3; - boost::tie(Ab, xtrue) = planarGraph(N); // A*x-b + std::tie(Ab, xtrue) = planarGraph(N); // A*x-b // Get the spanning tree GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2 - boost::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab); + std::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab); // Eliminate the spanning tree to build a prior GaussianBayesNet Rc1 = *Ab1.eliminateSequential(); // R1*x-c1 diff --git a/timing/timeGaussianFactor.cpp b/timing/timeGaussianFactor.cpp index feb738439..045b5ef67 100644 --- a/timing/timeGaussianFactor.cpp +++ b/timing/timeGaussianFactor.cpp @@ -21,7 +21,6 @@ #include using namespace std; -#include #include #include @@ -106,7 +105,7 @@ int main() JacobianFactor::shared_ptr factor; for(int i = 0; i < n; i++) - boost::tie(conditional, factor) = + std::tie(conditional, factor) = JacobianFactor(combined).eliminate(Ordering{_x2_}); long timeLog2 = clock(); diff --git a/timing/timeGaussianFactorGraph.cpp 
b/timing/timeGaussianFactorGraph.cpp index 574579f84..a5e9b6eed 100644 --- a/timing/timeGaussianFactorGraph.cpp +++ b/timing/timeGaussianFactorGraph.cpp @@ -63,7 +63,7 @@ double timePlanarSmootherEliminate(int N, bool old = true) { //double timePlanarSmootherJoinAug(int N, size_t reps) { // GaussianFactorGraph fgBase; // VectorValues config; -// boost::tie(fgBase,config) = planarGraph(N); +// std::tie(fgBase,config) = planarGraph(N); // Ordering ordering = fgBase.getOrdering(); // Symbol key = ordering.front(); // @@ -96,7 +96,7 @@ double timePlanarSmootherEliminate(int N, bool old = true) { //double timePlanarSmootherCombined(int N, size_t reps) { // GaussianFactorGraph fgBase; // VectorValues config; -// boost::tie(fgBase,config) = planarGraph(N); +// std::tie(fgBase,config) = planarGraph(N); // Ordering ordering = fgBase.getOrdering(); // Symbol key = ordering.front(); // diff --git a/timing/timeIncremental.cpp b/timing/timeIncremental.cpp index b8bcca5e7..a1948f115 100644 --- a/timing/timeIncremental.cpp +++ b/timing/timeIncremental.cpp @@ -27,7 +27,6 @@ #include #include #include -#include using namespace std; using namespace gtsam; @@ -225,9 +224,14 @@ int main(int argc, char *argv[]) { try { Marginals marginals(graph, values); int i=0; - for (Key key1: boost::adaptors::reverse(values.keys())) { + // Assign the keyvector to a named variable + auto keys = values.keys(); + // Iterate over it in reverse + for (auto it1 = keys.rbegin(); it1 != keys.rend(); ++it1) { + Key key1 = *it1; int j=0; - for (Key key2: boost::adaptors::reverse(values.keys())) { + for (auto it2 = keys.rbegin(); it2 != keys.rend(); ++it2) { + Key key2 = *it2; if(i != j) { gttic_(jointMarginalInformation); KeyVector keys(2); diff --git a/timing/timeLago.cpp b/timing/timeLago.cpp index bb506de36..d1e10bbe2 100644 --- a/timing/timeLago.cpp +++ b/timing/timeLago.cpp @@ -37,7 +37,7 @@ int main(int argc, char *argv[]) { NonlinearFactorGraph::shared_ptr g; string inputFile = findExampleDataFile("w10000"); auto model = noiseModel::Diagonal::Sigmas((Vector(3) << 0.05, 0.05, 5.0 * M_PI / 180.0).finished()); - boost::tie(g, solution) = load2D(inputFile, model); + std::tie(g, solution) = load2D(inputFile, model); // add noise to create initial estimate Values initial;
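
Note (not part of the patch): the hunks above repeat two mechanical substitutions — boost::tie/boost::tuple become std::tie/std::tuple, and the Boost.Range map_keys/map_values adaptors become C++17 structured bindings in range-for loops. The standalone sketch below, with hypothetical names such as makeResult, shows both replacements side by side; it compiles with any C++17 compiler and is only an illustration of the pattern, not code from the repository.

// Minimal sketch of the two substitutions applied throughout this patch.
#include <iostream>
#include <map>
#include <string>
#include <tuple>
#include <utility>

// A function returning multiple values via std::pair, standing in for calls
// such as readG2o() or eliminate() in the patch (hypothetical example).
std::pair<std::string, int> makeResult() { return {"graph", 42}; }

int main() {
  // Before: boost::tie(name, count) = makeResult();
  // After:  std::tie binds the already-declared variables to the returned pair.
  std::string name;
  int count;
  std::tie(name, count) = makeResult();

  // Before: for (Key key : container | boost::adaptors::map_keys) ...
  //         for (Vector& v : container | boost::adaptors::map_values) ...
  // After:  structured bindings expose key and value directly in a range-for.
  std::map<std::string, int> container{{"a", 1}, {"b", 2}};
  for (const auto& [key, value] : container) {
    std::cout << key << " -> " << value << "\n";
  }
  for (auto& [key, value] : container) {
    (void)key;    // key unused here; kept only to show the binding shape
    value *= 2;   // mutate the mapped values in place, as the LM damping loop does
  }

  std::cout << name << " " << count << "\n";
  return 0;
}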