diff --git a/examples/Pose2SLAMExample_g2o.cpp b/examples/Pose2SLAMExample_g2o.cpp index fb3869a50..4a77f7be1 100644 --- a/examples/Pose2SLAMExample_g2o.cpp +++ b/examples/Pose2SLAMExample_g2o.cpp @@ -48,16 +48,16 @@ int main(const int argc, const char *argv[]) { Values::shared_ptr initial; bool is3D = false; if (kernelType.compare("none") == 0) { - boost::tie(graph, initial) = readG2o(g2oFile, is3D); + std::tie(graph, initial) = readG2o(g2oFile, is3D); } if (kernelType.compare("huber") == 0) { std::cout << "Using robust kernel: huber " << std::endl; - boost::tie(graph, initial) = + std::tie(graph, initial) = readG2o(g2oFile, is3D, KernelFunctionTypeHUBER); } if (kernelType.compare("tukey") == 0) { std::cout << "Using robust kernel: tukey " << std::endl; - boost::tie(graph, initial) = + std::tie(graph, initial) = readG2o(g2oFile, is3D, KernelFunctionTypeTUKEY); } @@ -90,7 +90,7 @@ int main(const int argc, const char *argv[]) { std::cout << "Writing results to file: " << outputFile << std::endl; NonlinearFactorGraph::shared_ptr graphNoKernel; Values::shared_ptr initial2; - boost::tie(graphNoKernel, initial2) = readG2o(g2oFile); + std::tie(graphNoKernel, initial2) = readG2o(g2oFile); writeG2o(*graphNoKernel, result, outputFile); std::cout << "done! " << std::endl; } diff --git a/examples/Pose2SLAMExample_graph.cpp b/examples/Pose2SLAMExample_graph.cpp index c4301d28d..cf2513864 100644 --- a/examples/Pose2SLAMExample_graph.cpp +++ b/examples/Pose2SLAMExample_graph.cpp @@ -36,7 +36,7 @@ int main (int argc, char** argv) { Values::shared_ptr initial; SharedDiagonal model = noiseModel::Diagonal::Sigmas((Vector(3) << 0.05, 0.05, 5.0 * M_PI / 180.0).finished()); string graph_file = findExampleDataFile("w100.graph"); - boost::tie(graph, initial) = load2D(graph_file, model); + std::tie(graph, initial) = load2D(graph_file, model); initial->print("Initial estimate:\n"); // Add a Gaussian prior on first poses diff --git a/examples/Pose2SLAMExample_lago.cpp b/examples/Pose2SLAMExample_lago.cpp index 1d02c64fa..c470e3904 100644 --- a/examples/Pose2SLAMExample_lago.cpp +++ b/examples/Pose2SLAMExample_lago.cpp @@ -37,7 +37,7 @@ int main(const int argc, const char *argv[]) { NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; - boost::tie(graph, initial) = readG2o(g2oFile); + std::tie(graph, initial) = readG2o(g2oFile); // Add prior on the pose having index (key) = 0 auto priorModel = noiseModel::Diagonal::Variances(Vector3(1e-6, 1e-6, 1e-8)); @@ -55,7 +55,7 @@ int main(const int argc, const char *argv[]) { std::cout << "Writing results to file: " << outputFile << std::endl; NonlinearFactorGraph::shared_ptr graphNoKernel; Values::shared_ptr initial2; - boost::tie(graphNoKernel, initial2) = readG2o(g2oFile); + std::tie(graphNoKernel, initial2) = readG2o(g2oFile); writeG2o(*graphNoKernel, estimateLago, outputFile); std::cout << "done! 
" << std::endl; } diff --git a/examples/Pose3Localization.cpp b/examples/Pose3Localization.cpp index 44076ab38..4fb200faf 100644 --- a/examples/Pose3Localization.cpp +++ b/examples/Pose3Localization.cpp @@ -37,7 +37,7 @@ int main(const int argc, const char* argv[]) { NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; bool is3D = true; - boost::tie(graph, initial) = readG2o(g2oFile, is3D); + std::tie(graph, initial) = readG2o(g2oFile, is3D); // Add prior on the first key auto priorModel = noiseModel::Diagonal::Variances( @@ -67,7 +67,7 @@ int main(const int argc, const char* argv[]) { std::cout << "Writing results to file: " << outputFile << std::endl; NonlinearFactorGraph::shared_ptr graphNoKernel; Values::shared_ptr initial2; - boost::tie(graphNoKernel, initial2) = readG2o(g2oFile); + std::tie(graphNoKernel, initial2) = readG2o(g2oFile); writeG2o(*graphNoKernel, result, outputFile); std::cout << "done! " << std::endl; } diff --git a/examples/Pose3SLAMExample_changeKeys.cpp b/examples/Pose3SLAMExample_changeKeys.cpp index 392b1c417..ca9448ea3 100644 --- a/examples/Pose3SLAMExample_changeKeys.cpp +++ b/examples/Pose3SLAMExample_changeKeys.cpp @@ -36,7 +36,7 @@ int main(const int argc, const char *argv[]) { NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; bool is3D = true; - boost::tie(graph, initial) = readG2o(g2oFile, is3D); + std::tie(graph, initial) = readG2o(g2oFile, is3D); bool add = false; Key firstKey = 8646911284551352320; diff --git a/examples/Pose3SLAMExample_g2o.cpp b/examples/Pose3SLAMExample_g2o.cpp index 7ae2978ce..0c99a75a0 100644 --- a/examples/Pose3SLAMExample_g2o.cpp +++ b/examples/Pose3SLAMExample_g2o.cpp @@ -36,7 +36,7 @@ int main(const int argc, const char* argv[]) { NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; bool is3D = true; - boost::tie(graph, initial) = readG2o(g2oFile, is3D); + std::tie(graph, initial) = readG2o(g2oFile, is3D); // Add prior on the first key auto priorModel = noiseModel::Diagonal::Variances( @@ -66,7 +66,7 @@ int main(const int argc, const char* argv[]) { std::cout << "Writing results to file: " << outputFile << std::endl; NonlinearFactorGraph::shared_ptr graphNoKernel; Values::shared_ptr initial2; - boost::tie(graphNoKernel, initial2) = readG2o(g2oFile); + std::tie(graphNoKernel, initial2) = readG2o(g2oFile); writeG2o(*graphNoKernel, result, outputFile); std::cout << "done! " << std::endl; } diff --git a/examples/Pose3SLAMExample_initializePose3Chordal.cpp b/examples/Pose3SLAMExample_initializePose3Chordal.cpp index 03db9fc77..c1cc97c85 100644 --- a/examples/Pose3SLAMExample_initializePose3Chordal.cpp +++ b/examples/Pose3SLAMExample_initializePose3Chordal.cpp @@ -36,7 +36,7 @@ int main(const int argc, const char* argv[]) { NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; bool is3D = true; - boost::tie(graph, initial) = readG2o(g2oFile, is3D); + std::tie(graph, initial) = readG2o(g2oFile, is3D); // Add prior on the first key auto priorModel = noiseModel::Diagonal::Variances( @@ -60,7 +60,7 @@ int main(const int argc, const char* argv[]) { std::cout << "Writing results to file: " << outputFile << std::endl; NonlinearFactorGraph::shared_ptr graphNoKernel; Values::shared_ptr initial2; - boost::tie(graphNoKernel, initial2) = readG2o(g2oFile); + std::tie(graphNoKernel, initial2) = readG2o(g2oFile); writeG2o(*graphNoKernel, initialization, outputFile); std::cout << "done! 
" << std::endl; } diff --git a/examples/Pose3SLAMExample_initializePose3Gradient.cpp b/examples/Pose3SLAMExample_initializePose3Gradient.cpp index 31693c5ff..740391017 100644 --- a/examples/Pose3SLAMExample_initializePose3Gradient.cpp +++ b/examples/Pose3SLAMExample_initializePose3Gradient.cpp @@ -36,7 +36,7 @@ int main(const int argc, const char* argv[]) { NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; bool is3D = true; - boost::tie(graph, initial) = readG2o(g2oFile, is3D); + std::tie(graph, initial) = readG2o(g2oFile, is3D); // Add prior on the first key auto priorModel = noiseModel::Diagonal::Variances( @@ -66,7 +66,7 @@ int main(const int argc, const char* argv[]) { std::cout << "Writing results to file: " << outputFile << std::endl; NonlinearFactorGraph::shared_ptr graphNoKernel; Values::shared_ptr initial2; - boost::tie(graphNoKernel, initial2) = readG2o(g2oFile); + std::tie(graphNoKernel, initial2) = readG2o(g2oFile); writeG2o(*graphNoKernel, initialization, outputFile); std::cout << "done! " << std::endl; } diff --git a/examples/RangeISAMExample_plaza2.cpp b/examples/RangeISAMExample_plaza2.cpp index aa61ffc6c..73c69862d 100644 --- a/examples/RangeISAMExample_plaza2.cpp +++ b/examples/RangeISAMExample_plaza2.cpp @@ -92,7 +92,7 @@ std::list readOdometry() { // load the ranges from TD // Time (sec) Sender / Antenna ID Receiver Node ID Range (m) -using RangeTriple = boost::tuple; +using RangeTriple = std::tuple; std::vector readTriples() { std::vector triples; std::string data_file = gtsam::findExampleDataFile("Plaza2_TD.txt"); @@ -166,7 +166,7 @@ int main(int argc, char** argv) { //--------------------------------- odometry loop -------------------------- double t; Pose2 odometry; - boost::tie(t, odometry) = timedOdometry; + std::tie(t, odometry) = timedOdometry; // add odometry factor newFactors.emplace_shared>(i - 1, i, odometry, diff --git a/examples/ShonanAveragingCLI.cpp b/examples/ShonanAveragingCLI.cpp index c72a32017..831b608e3 100644 --- a/examples/ShonanAveragingCLI.cpp +++ b/examples/ShonanAveragingCLI.cpp @@ -103,7 +103,7 @@ int main(int argc, char* argv[]) { auto result = shonan.run(initial, pMin); // Parse file again to set up translation problem, adding a prior - boost::tie(inputGraph, posesInFile) = load2D(inputFile); + std::tie(inputGraph, posesInFile) = load2D(inputFile); auto priorModel = noiseModel::Unit::Create(3); inputGraph->addPrior(0, posesInFile->at(0), priorModel); @@ -119,7 +119,7 @@ int main(int argc, char* argv[]) { auto result = shonan.run(initial, pMin); // Parse file again to set up translation problem, adding a prior - boost::tie(inputGraph, posesInFile) = load3D(inputFile); + std::tie(inputGraph, posesInFile) = load3D(inputFile); auto priorModel = noiseModel::Unit::Create(6); inputGraph->addPrior(0, posesInFile->at(0), priorModel); diff --git a/gtsam/base/Matrix.cpp b/gtsam/base/Matrix.cpp index 5b8a021d4..6dc7d38ef 100644 --- a/gtsam/base/Matrix.cpp +++ b/gtsam/base/Matrix.cpp @@ -251,7 +251,7 @@ pair qr(const Matrix& A) { // calculate the Householder vector v double beta; Vector vjm; - boost::tie(beta,vjm) = house(xjm); + std::tie(beta,vjm) = house(xjm); // pad with zeros to get m-dimensional vector v for(size_t k = 0 ; k < m; k++) @@ -269,13 +269,13 @@ pair qr(const Matrix& A) { } /* ************************************************************************* */ -list > +list > weighted_eliminate(Matrix& A, Vector& b, const Vector& sigmas) { size_t m = A.rows(), n = A.cols(); // get size(A) size_t maxRank = min(m,n); // create 
list - list > results; + list > results; Vector pseudo(m); // allocate storage for pseudo-inverse Vector weights = sigmas.array().square().inverse(); // calculate weights once @@ -304,7 +304,7 @@ weighted_eliminate(Matrix& A, Vector& b, const Vector& sigmas) { // construct solution (r, d, sigma) // TODO: avoid sqrt, store precision or at least variance - results.push_back(boost::make_tuple(r, d, 1./sqrt(precision))); + results.push_back(std::make_tuple(r, d, 1./sqrt(precision))); // exit after rank exhausted if (results.size()>=maxRank) break; @@ -565,7 +565,7 @@ void svd(const Matrix& A, Matrix& U, Vector& S, Matrix& V) { } /* ************************************************************************* */ -boost::tuple DLT(const Matrix& A, double rank_tol) { +std::tuple DLT(const Matrix& A, double rank_tol) { // Check size of A size_t n = A.rows(), p = A.cols(), m = min(n,p); @@ -582,7 +582,7 @@ boost::tuple DLT(const Matrix& A, double rank_tol) { // Return rank, error, and corresponding column of V double error = m
< p ? 0 : s(m-1); - return boost::tuple<int, double, Vector>
((int)rank, error, Vector(column(V, p-1))); + return std::tuple((int)rank, error, Vector(column(V, p-1))); } /* ************************************************************************* */ diff --git a/gtsam/base/Matrix.h b/gtsam/base/Matrix.h index 88714d6f6..317473739 100644 --- a/gtsam/base/Matrix.h +++ b/gtsam/base/Matrix.h @@ -307,7 +307,7 @@ GTSAM_EXPORT void inplace_QR(Matrix& A); * @param sigmas is a vector of the measurement standard deviation * @return list of r vectors, d and sigma */ -GTSAM_EXPORT std::list > +GTSAM_EXPORT std::list > weighted_eliminate(Matrix& A, Vector& b, const Vector& sigmas); /** @@ -434,7 +434,7 @@ GTSAM_EXPORT void svd(const Matrix& A, Matrix& U, Vector& S, Matrix& V); * Returns rank of A, minimum error (singular value), * and corresponding eigenvector (column of V, with A=U*S*V') */ -GTSAM_EXPORT boost::tuple +GTSAM_EXPORT std::tuple DLT(const Matrix& A, double rank_tol = 1e-9); /** diff --git a/gtsam/base/tests/testMatrix.cpp b/gtsam/base/tests/testMatrix.cpp index fb3bd948c..c0e5c9e69 100644 --- a/gtsam/base/tests/testMatrix.cpp +++ b/gtsam/base/tests/testMatrix.cpp @@ -859,7 +859,7 @@ TEST(Matrix, qr ) 7.4536, 0, 00, 0, 0, 10.9545, 00, 0, 0, 0, 00, 0, 0, 0).finished(); Matrix Q, R; - boost::tie(Q, R) = qr(A); + std::tie(Q, R) = qr(A); EXPECT(assert_equal(expectedQ, Q, 1e-4)); EXPECT(assert_equal(expectedR, R, 1e-4)); EXPECT(assert_equal(A, Q*R, 1e-14)); @@ -911,7 +911,7 @@ TEST(Matrix, weighted_elimination ) // perform elimination Matrix A1 = A; Vector b1 = b; - std::list > solution = + std::list > solution = weighted_eliminate(A1, b1, sigmas); // unpack and verify @@ -919,7 +919,7 @@ TEST(Matrix, weighted_elimination ) for (const auto& tuple : solution) { Vector r; double di, sigma; - boost::tie(r, di, sigma) = tuple; + std::tie(r, di, sigma) = tuple; EXPECT(assert_equal(r, expectedR.row(i))); // verify r DOUBLES_EQUAL(d(i), di, 1e-8); // verify d DOUBLES_EQUAL(newSigmas(i), sigma, 1e-5); // verify sigma @@ -1146,7 +1146,7 @@ TEST(Matrix, DLT ) int rank; double error; Vector actual; - boost::tie(rank,error,actual) = DLT(A); + std::tie(rank,error,actual) = DLT(A); Vector expected = (Vector(9) << -0.0, 0.2357, 0.4714, -0.2357, 0.0, - 0.4714,-0.4714, 0.4714, 0.0).finished(); EXPECT_LONGS_EQUAL(8,rank); EXPECT_DOUBLES_EQUAL(0,error,1e-8); diff --git a/gtsam/base/tests/testVector.cpp b/gtsam/base/tests/testVector.cpp index c87732b09..7532d78b9 100644 --- a/gtsam/base/tests/testVector.cpp +++ b/gtsam/base/tests/testVector.cpp @@ -156,7 +156,7 @@ TEST(Vector, weightedPseudoinverse ) // perform solve Vector actual; double precision; - boost::tie(actual, precision) = weightedPseudoinverse(x, weights); + std::tie(actual, precision) = weightedPseudoinverse(x, weights); // construct expected Vector expected(2); @@ -181,7 +181,7 @@ TEST(Vector, weightedPseudoinverse_constraint ) Vector weights = sigmas.array().square().inverse(); // perform solve Vector actual; double precision; - boost::tie(actual, precision) = weightedPseudoinverse(x, weights); + std::tie(actual, precision) = weightedPseudoinverse(x, weights); // construct expected Vector expected(2); @@ -199,7 +199,7 @@ TEST(Vector, weightedPseudoinverse_nan ) Vector sigmas = (Vector(4) << 0.1, 0.1, 0., 0.).finished(); Vector weights = sigmas.array().square().inverse(); Vector pseudo; double precision; - boost::tie(pseudo, precision) = weightedPseudoinverse(a, weights); + std::tie(pseudo, precision) = weightedPseudoinverse(a, weights); Vector expected = (Vector(4) << 1., 0., 0.,0.).finished(); 
EXPECT(assert_equal(expected, pseudo)); diff --git a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp index 9439ff417..a4d19c96c 100644 --- a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp +++ b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp @@ -111,7 +111,7 @@ TEST(DiscreteFactorGraph, test) { frontalKeys += Key(0); DiscreteConditional::shared_ptr conditional; DecisionTreeFactor::shared_ptr newFactor; - boost::tie(conditional, newFactor) = EliminateDiscrete(graph, frontalKeys); + std::tie(conditional, newFactor) = EliminateDiscrete(graph, frontalKeys); // Check Conditional CHECK(conditional); @@ -130,7 +130,7 @@ TEST(DiscreteFactorGraph, test) { DiscreteEliminationTree etree(graph, ordering); DiscreteBayesNet::shared_ptr actual; DiscreteFactorGraph::shared_ptr remainingGraph; - boost::tie(actual, remainingGraph) = etree.eliminate(&EliminateDiscrete); + std::tie(actual, remainingGraph) = etree.eliminate(&EliminateDiscrete); // Check Bayes net DiscreteBayesNet expectedBayesNet; diff --git a/gtsam/geometry/Rot3.cpp b/gtsam/geometry/Rot3.cpp index e854d03b7..153b9690e 100644 --- a/gtsam/geometry/Rot3.cpp +++ b/gtsam/geometry/Rot3.cpp @@ -170,13 +170,13 @@ Vector3 Rot3::xyz(OptionalJacobian<3, 3> H) const { #endif Matrix39 qHm; - boost::tie(I, q) = RQ(m, qHm); + std::tie(I, q) = RQ(m, qHm); // TODO : Explore whether this expression can be optimized as both // qHm and mH are super-sparse *H = qHm * mH; } else - boost::tie(I, q) = RQ(matrix()); + std::tie(I, q) = RQ(matrix()); return q; } diff --git a/gtsam/geometry/tests/testRot3.cpp b/gtsam/geometry/tests/testRot3.cpp index 0e7967715..2f48948eb 100644 --- a/gtsam/geometry/tests/testRot3.cpp +++ b/gtsam/geometry/tests/testRot3.cpp @@ -510,7 +510,7 @@ TEST( Rot3, RQ) // Try RQ on a pure rotation Matrix actualK; Vector actual; - boost::tie(actualK, actual) = RQ(R.matrix()); + std::tie(actualK, actual) = RQ(R.matrix()); Vector expected = Vector3(0.14715, 0.385821, 0.231671); CHECK(assert_equal(I_3x3,actualK)); CHECK(assert_equal(expected,actual,1e-6)); @@ -531,7 +531,7 @@ TEST( Rot3, RQ) // Try RQ to recover calibration from 3*3 sub-block of projection matrix Matrix K = (Matrix(3, 3) << 500.0, 0.0, 320.0, 0.0, 500.0, 240.0, 0.0, 0.0, 1.0).finished(); Matrix A = K * R.matrix(); - boost::tie(actualK, actual) = RQ(A); + std::tie(actualK, actual) = RQ(A); CHECK(assert_equal(K,actualK)); CHECK(assert_equal(expected,actual,1e-6)); } diff --git a/gtsam/geometry/triangulation.cpp b/gtsam/geometry/triangulation.cpp index acb0ddf0e..245b8634a 100644 --- a/gtsam/geometry/triangulation.cpp +++ b/gtsam/geometry/triangulation.cpp @@ -51,7 +51,7 @@ Vector4 triangulateHomogeneousDLT( int rank; double error; Vector v; - boost::tie(rank, error, v) = DLT(A, rank_tol); + std::tie(rank, error, v) = DLT(A, rank_tol); if (rank < 3) throw(TriangulationUnderconstrainedException()); @@ -82,7 +82,7 @@ Vector4 triangulateHomogeneousDLT( int rank; double error; Vector v; - boost::tie(rank, error, v) = DLT(A, rank_tol); + std::tie(rank, error, v) = DLT(A, rank_tol); if (rank < 3) throw(TriangulationUnderconstrainedException()); diff --git a/gtsam/geometry/triangulation.h b/gtsam/geometry/triangulation.h index 24bc94d80..05c66f8df 100644 --- a/gtsam/geometry/triangulation.h +++ b/gtsam/geometry/triangulation.h @@ -195,7 +195,7 @@ Point3 triangulateNonlinear(const std::vector& poses, // Create a factor graph and initial values Values values; NonlinearFactorGraph graph; - boost::tie(graph, values) = triangulationGraph // + 
std::tie(graph, values) = triangulationGraph // (poses, sharedCal, measurements, Symbol('p', 0), initialEstimate, model); return optimize(graph, values, Symbol('p', 0)); @@ -217,7 +217,7 @@ Point3 triangulateNonlinear( // Create a factor graph and initial values Values values; NonlinearFactorGraph graph; - boost::tie(graph, values) = triangulationGraph // + std::tie(graph, values) = triangulationGraph // (cameras, measurements, Symbol('p', 0), initialEstimate, model); return optimize(graph, values, Symbol('p', 0)); diff --git a/gtsam/hybrid/HybridJunctionTree.cpp b/gtsam/hybrid/HybridJunctionTree.cpp index 6c0e62ac9..70cfc4be4 100644 --- a/gtsam/hybrid/HybridJunctionTree.cpp +++ b/gtsam/hybrid/HybridJunctionTree.cpp @@ -102,7 +102,7 @@ struct HybridConstructorTraversalData { keyAsOrdering.push_back(node->key); SymbolicConditional::shared_ptr conditional; SymbolicFactor::shared_ptr separatorFactor; - boost::tie(conditional, separatorFactor) = + std::tie(conditional, separatorFactor) = internal::EliminateSymbolic(symbolicFactors, keyAsOrdering); // Store symbolic elimination results in the parent diff --git a/gtsam/inference/BayesTree-inst.h b/gtsam/inference/BayesTree-inst.h index 8e771708f..8d3712624 100644 --- a/gtsam/inference/BayesTree-inst.h +++ b/gtsam/inference/BayesTree-inst.h @@ -361,7 +361,7 @@ namespace gtsam { } // Factor into C1\B | B. sharedFactorGraph temp_remaining; - boost::tie(p_C1_B, temp_remaining) = + std::tie(p_C1_B, temp_remaining) = FactorGraphType(p_C1_Bred).eliminatePartialMultifrontal(Ordering(C1_minus_B), function); } std::shared_ptr p_C2_B; { @@ -373,7 +373,7 @@ namespace gtsam { } // Factor into C2\B | B. sharedFactorGraph temp_remaining; - boost::tie(p_C2_B, temp_remaining) = + std::tie(p_C2_B, temp_remaining) = FactorGraphType(p_C2_Bred).eliminatePartialMultifrontal(Ordering(C2_minus_B), function); } gttoc(Full_root_factoring); diff --git a/gtsam/inference/EliminateableFactorGraph-inst.h b/gtsam/inference/EliminateableFactorGraph-inst.h index 558acbfe8..9667da041 100644 --- a/gtsam/inference/EliminateableFactorGraph-inst.h +++ b/gtsam/inference/EliminateableFactorGraph-inst.h @@ -75,7 +75,7 @@ namespace gtsam { EliminationTreeType etree(asDerived(), (*variableIndex).get(), ordering); std::shared_ptr bayesNet; std::shared_ptr factorGraph; - boost::tie(bayesNet,factorGraph) = etree.eliminate(function); + std::tie(bayesNet,factorGraph) = etree.eliminate(function); // If any factors are remaining, the ordering was incomplete if(!factorGraph->empty()) throw InconsistentEliminationRequested(); @@ -139,7 +139,7 @@ namespace gtsam { JunctionTreeType junctionTree(etree); std::shared_ptr bayesTree; std::shared_ptr factorGraph; - boost::tie(bayesTree,factorGraph) = junctionTree.eliminate(function); + std::tie(bayesTree,factorGraph) = junctionTree.eliminate(function); // If any factors are remaining, the ordering was incomplete if(!factorGraph->empty()) throw InconsistentEliminationRequested(); @@ -277,7 +277,7 @@ namespace gtsam { // in the order requested. std::shared_ptr bayesTree; std::shared_ptr factorGraph; - boost::tie(bayesTree,factorGraph) = + std::tie(bayesTree,factorGraph) = eliminatePartialMultifrontal(marginalizedVariableOrdering, function, variableIndex); if(const Ordering* varsAsOrdering = boost::get(&variables)) @@ -344,7 +344,7 @@ namespace gtsam { // in the order requested. 
std::shared_ptr bayesTree; std::shared_ptr factorGraph; - boost::tie(bayesTree,factorGraph) = + std::tie(bayesTree,factorGraph) = eliminatePartialMultifrontal(marginalizedVariableOrdering, function, variableIndex); if(const Ordering* varsAsOrdering = boost::get(&variables)) diff --git a/gtsam/inference/JunctionTree-inst.h b/gtsam/inference/JunctionTree-inst.h index 86be13760..c607857c5 100644 --- a/gtsam/inference/JunctionTree-inst.h +++ b/gtsam/inference/JunctionTree-inst.h @@ -85,7 +85,7 @@ struct ConstructorTraversalData { keyAsOrdering.push_back(ETreeNode->key); SymbolicConditional::shared_ptr myConditional; SymbolicFactor::shared_ptr mySeparatorFactor; - boost::tie(myConditional, mySeparatorFactor) = internal::EliminateSymbolic( + std::tie(myConditional, mySeparatorFactor) = internal::EliminateSymbolic( symbolicFactors, keyAsOrdering); // Store symbolic elimination results in the parent diff --git a/gtsam/inference/VariableSlots.h b/gtsam/inference/VariableSlots.h index a665890c2..62fc0d9c5 100644 --- a/gtsam/inference/VariableSlots.h +++ b/gtsam/inference/VariableSlots.h @@ -111,7 +111,7 @@ VariableSlots::VariableSlots(const FG& factorGraph) // the array entry for each factor that will indicate the factor // does not involve the variable. iterator thisVarSlots; bool inserted; - boost::tie(thisVarSlots, inserted) = this->insert(std::make_pair(involvedVariable, FastVector())); + std::tie(thisVarSlots, inserted) = this->insert(std::make_pair(involvedVariable, FastVector())); if(inserted) thisVarSlots->second.resize(factorGraph.nrFactors(), Empty); thisVarSlots->second[jointFactorPos] = factorVarSlot; diff --git a/gtsam/inference/graph-inl.h b/gtsam/inference/graph-inl.h index 93bb2a51c..ceae2e3ab 100644 --- a/gtsam/inference/graph-inl.h +++ b/gtsam/inference/graph-inl.h @@ -54,7 +54,7 @@ std::list predecessorMap2Keys(const PredecessorMap& p_map) { SGraph g; SVertex root; std::map key2vertex; - boost::tie(g, root, key2vertex) = gtsam::predecessorMap2Graph, SVertex, KEY>(p_map); + std::tie(g, root, key2vertex) = gtsam::predecessorMap2Graph, SVertex, KEY>(p_map); // breadth first visit on the graph std::list keys; @@ -114,7 +114,7 @@ SDGraph toBoostGraph(const G& graph) { /* ************************************************************************* */ template -boost::tuple > +std::tuple > predecessorMap2Graph(const PredecessorMap& p_map) { G g; @@ -146,7 +146,7 @@ predecessorMap2Graph(const PredecessorMap& p_map) { if (!foundRoot) throw std::invalid_argument("predecessorMap2Graph: invalid predecessor map!"); else - return boost::tuple >(g, root, key2vertex); + return std::tuple >(g, root, key2vertex); } /* ************************************************************************* */ @@ -185,7 +185,7 @@ std::shared_ptr composePoses(const G& graph, const PredecessorMap& PoseGraph g; PoseVertex root; std::map key2vertex; - boost::tie(g, root, key2vertex) = + std::tie(g, root, key2vertex) = predecessorMap2Graph(tree); // attach the relative poses to the edges @@ -207,8 +207,8 @@ std::shared_ptr composePoses(const G& graph, const PredecessorMap& PoseVertex v2 = key2vertex.find(key2)->second; POSE l1Xl2 = factor->measured(); - boost::tie(edge12, found1) = boost::edge(v1, v2, g); - boost::tie(edge21, found2) = boost::edge(v2, v1, g); + std::tie(edge12, found1) = boost::edge(v1, v2, g); + std::tie(edge21, found2) = boost::edge(v2, v1, g); if (found1 && found2) throw std::invalid_argument ("composePoses: invalid spanning tree"); if (!found1 && !found2) continue; if (found1) diff --git 
a/gtsam/inference/graph.h b/gtsam/inference/graph.h index 9f5f4c9bc..988c79b74 100644 --- a/gtsam/inference/graph.h +++ b/gtsam/inference/graph.h @@ -83,7 +83,7 @@ namespace gtsam { * V = Vertex type */ template - boost::tuple > predecessorMap2Graph(const PredecessorMap& p_map); + std::tuple > predecessorMap2Graph(const PredecessorMap& p_map); /** * Compose the poses by following the chain specified by the spanning tree diff --git a/gtsam/linear/HessianFactor.cpp b/gtsam/linear/HessianFactor.cpp index 194a7ca53..cf815ee3a 100644 --- a/gtsam/linear/HessianFactor.cpp +++ b/gtsam/linear/HessianFactor.cpp @@ -417,7 +417,7 @@ void HessianFactor::multiplyHessianAdd(double alpha, const VectorValues& x, for (DenseIndex i = 0; i < (DenseIndex) size(); ++i) { bool didNotExist; VectorValues::iterator it; - boost::tie(it, didNotExist) = yvalues.tryInsert(keys_[i], Vector()); + std::tie(it, didNotExist) = yvalues.tryInsert(keys_[i], Vector()); if (didNotExist) it->second = alpha * y[i]; // init else diff --git a/gtsam/linear/JacobianFactor.cpp b/gtsam/linear/JacobianFactor.cpp index 6f8bd99f9..6779a033f 100644 --- a/gtsam/linear/JacobianFactor.cpp +++ b/gtsam/linear/JacobianFactor.cpp @@ -102,7 +102,7 @@ JacobianFactor::JacobianFactor(const HessianFactor& factor) // Do Cholesky to get a Jacobian size_t maxrank; bool success; - boost::tie(maxrank, success) = choleskyCareful(Ab_.matrix()); + std::tie(maxrank, success) = choleskyCareful(Ab_.matrix()); // Check that Cholesky succeeded OR it managed to factor the full Hessian. // THe latter case occurs with non-positive definite matrices arising from QP. @@ -122,7 +122,7 @@ JacobianFactor::JacobianFactor(const HessianFactor& factor) /* ************************************************************************* */ // Helper functions for combine constructor namespace { -boost::tuple, DenseIndex, DenseIndex> _countDims( +std::tuple, DenseIndex, DenseIndex> _countDims( const FastVector& factors, const FastVector& variableSlots) { gttic(countDims); @@ -188,7 +188,7 @@ boost::tuple, DenseIndex, DenseIndex> _countDims( } #endif - return boost::make_tuple(varDims, m, n); + return std::make_tuple(varDims, m, n); } /* ************************************************************************* */ @@ -221,7 +221,7 @@ void JacobianFactor::JacobianFactorHelper(const GaussianFactorGraph& graph, // Count dimensions FastVector varDims; DenseIndex m, n; - boost::tie(varDims, m, n) = _countDims(jacobians, orderedSlots); + std::tie(varDims, m, n) = _countDims(jacobians, orderedSlots); // Allocate matrix and copy keys in order gttic(allocate); diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index 32dd1c535..02e44e2a8 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -480,7 +480,7 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const { const size_t maxRank = min(m, n); // create storage for [R d] - typedef boost::tuple Triple; + typedef std::tuple Triple; list Rd; Matrix rd(1, n + 1); // and for row of R @@ -506,7 +506,7 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const { rd = Ab.row(*constraint_row); // Construct solution (r, d, sigma) - Rd.push_back(boost::make_tuple(j, rd, kInfinity)); + Rd.push_back(std::make_tuple(j, rd, kInfinity)); // exit after rank exhausted if (Rd.size() >= maxRank) @@ -552,7 +552,7 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const { rd.block(0, j + 1, 1, n - j) = pseudo.transpose() * Ab.block(0, j + 1, m, n - j); // construct solution (r, d, sigma) - Rd.push_back(boost::make_tuple(j, rd, precision)); + 
Rd.push_back(std::make_tuple(j, rd, precision)); } else { // If precision is zero, no information on this column // This is actually not limited to constraints, could happen in Gaussian::QR @@ -577,9 +577,9 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const { bool mixed = false; Ab.setZero(); // make sure we don't look below for (const Triple& t: Rd) { - const size_t& j = t.get<0>(); - const Matrix& rd = t.get<1>(); - precisions(i) = t.get<2>(); + const size_t& j = std::get<0>(t); + const Matrix& rd = std::get<1>(t); + precisions(i) = std::get<2>(t); if (std::isinf(precisions(i))) mixed = true; Ab.block(i, j, 1, n + 1 - j) = rd.block(0, j, 1, n + 1 - j); diff --git a/gtsam/linear/VectorValues.cpp b/gtsam/linear/VectorValues.cpp index 04792b6ba..c1d715618 100644 --- a/gtsam/linear/VectorValues.cpp +++ b/gtsam/linear/VectorValues.cpp @@ -51,7 +51,7 @@ namespace gtsam { for (const Pair& v : dims) { Key key; size_t n; - boost::tie(key, n) = v; + std::tie(key, n) = v; #ifdef TBB_GREATER_EQUAL_2020 values_.emplace(key, x.segment(j, n)); #else @@ -215,11 +215,11 @@ namespace gtsam { /* ************************************************************************ */ namespace internal { - bool structureCompareOp(const boost::tuple& vv) { - return vv.get<0>().first == vv.get<1>().first - && vv.get<0>().second.size() == vv.get<1>().second.size(); + return std::get<0>(vv).first == std::get<1>(vv).first + && std::get<0>(vv).second.size() == std::get<1>(vv).second.size(); } } @@ -236,7 +236,7 @@ namespace gtsam { if(this->size() != v.size()) throw invalid_argument("VectorValues::dot called with a VectorValues of different structure"); double result = 0.0; - typedef boost::tuple ValuePair; + typedef std::tuple ValuePair; using boost::adaptors::map_values; for(const ValuePair values: boost::combine(*this, v)) { assert_throw(values.get<0>().first == values.get<1>().first, diff --git a/gtsam/linear/tests/testGaussianBayesNet.cpp b/gtsam/linear/tests/testGaussianBayesNet.cpp index ec208fa7b..316a46ce2 100644 --- a/gtsam/linear/tests/testGaussianBayesNet.cpp +++ b/gtsam/linear/tests/testGaussianBayesNet.cpp @@ -52,7 +52,7 @@ static GaussianBayesNet noisyBayesNet = { TEST( GaussianBayesNet, Matrix ) { Matrix R; Vector d; - boost::tie(R,d) = smallBayesNet.matrix(); // find matrix and RHS + std::tie(R,d) = smallBayesNet.matrix(); // find matrix and RHS Matrix R1 = (Matrix2() << 1.0, 1.0, @@ -102,7 +102,7 @@ TEST(GaussianBayesNet, Evaluate2) { TEST( GaussianBayesNet, NoisyMatrix ) { Matrix R; Vector d; - boost::tie(R,d) = noisyBayesNet.matrix(); // find matrix and RHS + std::tie(R,d) = noisyBayesNet.matrix(); // find matrix and RHS Matrix R1 = (Matrix2() << 0.5, 0.5, @@ -126,7 +126,7 @@ TEST(GaussianBayesNet, Optimize) { TEST(GaussianBayesNet, NoisyOptimize) { Matrix R; Vector d; - boost::tie(R, d) = noisyBayesNet.matrix(); // find matrix and RHS + std::tie(R, d) = noisyBayesNet.matrix(); // find matrix and RHS const Vector x = R.inverse() * d; const VectorValues expected{{_x_, x.head(1)}, {_y_, x.tail(1)}}; @@ -239,7 +239,7 @@ TEST( GaussianBayesNet, MatrixStress ) const Ordering ordering(keys); Matrix R; Vector d; - boost::tie(R, d) = bn.matrix(ordering); + std::tie(R, d) = bn.matrix(ordering); EXPECT(assert_equal(expected.vector(ordering), R.inverse() * d)); } } diff --git a/gtsam/linear/tests/testGaussianFactorGraph.cpp b/gtsam/linear/tests/testGaussianFactorGraph.cpp index 2ed4e6589..d078ddf20 100644 --- a/gtsam/linear/tests/testGaussianFactorGraph.cpp +++ b/gtsam/linear/tests/testGaussianFactorGraph.cpp @@ 
-157,7 +157,7 @@ TEST(GaussianFactorGraph, matrices) { Vector b = Ab.col(Ab.cols() - 1); Matrix actualA; Vector actualb; - boost::tie(actualA, actualb) = gfg.jacobian(); + std::tie(actualA, actualb) = gfg.jacobian(); EXPECT(assert_equal(A, actualA)); EXPECT(assert_equal(b, actualb)); @@ -166,7 +166,7 @@ TEST(GaussianFactorGraph, matrices) { Vector eta = A.transpose() * b; Matrix actualL; Vector actualeta; - boost::tie(actualL, actualeta) = gfg.hessian(); + std::tie(actualL, actualeta) = gfg.hessian(); EXPECT(assert_equal(L, actualL)); EXPECT(assert_equal(eta, actualeta)); @@ -247,7 +247,7 @@ TEST(GaussianFactorGraph, eliminate_empty) { gfg.add(JacobianFactor()); GaussianBayesNet::shared_ptr actualBN; GaussianFactorGraph::shared_ptr remainingGFG; - boost::tie(actualBN, remainingGFG) = gfg.eliminatePartialSequential(Ordering()); + std::tie(actualBN, remainingGFG) = gfg.eliminatePartialSequential(Ordering()); // expected Bayes net is empty GaussianBayesNet expectedBN; @@ -265,10 +265,10 @@ TEST(GaussianFactorGraph, matrices2) { GaussianFactorGraph gfg = createSimpleGaussianFactorGraph(); Matrix A; Vector b; - boost::tie(A, b) = gfg.jacobian(); + std::tie(A, b) = gfg.jacobian(); Matrix AtA; Vector eta; - boost::tie(AtA, eta) = gfg.hessian(); + std::tie(AtA, eta) = gfg.hessian(); EXPECT(assert_equal(A.transpose() * A, AtA)); EXPECT(assert_equal(A.transpose() * b, eta)); Matrix expectedAtA(6, 6); @@ -316,7 +316,7 @@ TEST(GaussianFactorGraph, multiplyHessianAdd2) { // brute force Matrix AtA; Vector eta; - boost::tie(AtA, eta) = gfg.hessian(); + std::tie(AtA, eta) = gfg.hessian(); Vector X(6); X << 1, 2, 3, 4, 5, 6; Vector Y(6); @@ -343,10 +343,10 @@ TEST(GaussianFactorGraph, matricesMixed) { GaussianFactorGraph gfg = createGaussianFactorGraphWithHessianFactor(); Matrix A; Vector b; - boost::tie(A, b) = gfg.jacobian(); // incorrect ! + std::tie(A, b) = gfg.jacobian(); // incorrect ! 
Matrix AtA; Vector eta; - boost::tie(AtA, eta) = gfg.hessian(); // correct + std::tie(AtA, eta) = gfg.hessian(); // correct EXPECT(assert_equal(A.transpose() * A, AtA)); Vector expected = -(Vector(6) << -25, 17.5, 5, -13.5, 29, 4).finished(); EXPECT(assert_equal(expected, eta)); diff --git a/gtsam/linear/tests/testHessianFactor.cpp b/gtsam/linear/tests/testHessianFactor.cpp index 5fd6e833f..8821dfdd3 100644 --- a/gtsam/linear/tests/testHessianFactor.cpp +++ b/gtsam/linear/tests/testHessianFactor.cpp @@ -295,13 +295,13 @@ TEST(HessianFactor, CombineAndEliminate1) { Ordering ordering {1}; GaussianConditional::shared_ptr expectedConditional; JacobianFactor::shared_ptr expectedFactor; - boost::tie(expectedConditional, expectedFactor) = jacobian.eliminate(ordering); + std::tie(expectedConditional, expectedFactor) = jacobian.eliminate(ordering); CHECK(expectedFactor); // Eliminate GaussianConditional::shared_ptr actualConditional; HessianFactor::shared_ptr actualHessian; - boost::tie(actualConditional, actualHessian) = // + std::tie(actualConditional, actualHessian) = // EliminateCholesky(gfg, ordering); actualConditional->setModel(false,Vector3(1,1,1)); // add a unit model for comparison @@ -358,13 +358,13 @@ TEST(HessianFactor, CombineAndEliminate2) { Ordering ordering {0}; GaussianConditional::shared_ptr expectedConditional; JacobianFactor::shared_ptr expectedFactor; - boost::tie(expectedConditional, expectedFactor) = // + std::tie(expectedConditional, expectedFactor) = // jacobian.eliminate(ordering); // Eliminate GaussianConditional::shared_ptr actualConditional; HessianFactor::shared_ptr actualHessian; - boost::tie(actualConditional, actualHessian) = // + std::tie(actualConditional, actualHessian) = // EliminateCholesky(gfg, ordering); actualConditional->setModel(false,Vector3(1,1,1)); // add a unit model for comparison @@ -498,7 +498,7 @@ TEST(HessianFactor, gradientAtZero) // test gradient at zero VectorValues expectedG{{0, -g1}, {1, -g2}}; - Matrix A; Vector b; boost::tie(A,b) = factor.jacobian(); + Matrix A; Vector b; std::tie(A,b) = factor.jacobian(); KeyVector keys {0, 1}; EXPECT(assert_equal(-A.transpose()*b, expectedG.vector(keys))); VectorValues actualG = factor.gradientAtZero(); diff --git a/gtsam/linear/tests/testJacobianFactor.cpp b/gtsam/linear/tests/testJacobianFactor.cpp index ad722eb94..5b3be6492 100644 --- a/gtsam/linear/tests/testJacobianFactor.cpp +++ b/gtsam/linear/tests/testJacobianFactor.cpp @@ -371,7 +371,7 @@ TEST(JacobianFactor, operators ) EXPECT(assert_equal(expectedX, actualX)); // test gradient at zero - Matrix A; Vector b2; boost::tie(A,b2) = lf.jacobian(); + Matrix A; Vector b2; std::tie(A,b2) = lf.jacobian(); VectorValues expectedG; expectedG.insert(1, Vector2(20,-10)); expectedG.insert(2, Vector2(-20, 10)); diff --git a/gtsam/navigation/ManifoldPreintegration.cpp b/gtsam/navigation/ManifoldPreintegration.cpp index 526bf6c73..c0c917d9c 100644 --- a/gtsam/navigation/ManifoldPreintegration.cpp +++ b/gtsam/navigation/ManifoldPreintegration.cpp @@ -68,7 +68,7 @@ void ManifoldPreintegration::update(const Vector3& measuredAcc, // Possibly correct for sensor pose Matrix3 D_correctedAcc_acc, D_correctedAcc_omega, D_correctedOmega_omega; if (p().body_P_sensor) - boost::tie(acc, omega) = correctMeasurementsBySensorPose(acc, omega, + std::tie(acc, omega) = correctMeasurementsBySensorPose(acc, omega, D_correctedAcc_acc, D_correctedAcc_omega, D_correctedOmega_omega); // Save current rotation for updating Jacobians diff --git a/gtsam/navigation/TangentPreintegration.cpp 
b/gtsam/navigation/TangentPreintegration.cpp index e8579daba..a472b2cfd 100644 --- a/gtsam/navigation/TangentPreintegration.cpp +++ b/gtsam/navigation/TangentPreintegration.cpp @@ -112,7 +112,7 @@ void TangentPreintegration::update(const Vector3& measuredAcc, // Possibly correct for sensor pose by converting to body frame Matrix3 D_correctedAcc_acc, D_correctedAcc_omega, D_correctedOmega_omega; if (p().body_P_sensor) - boost::tie(acc, omega) = correctMeasurementsBySensorPose(acc, omega, + std::tie(acc, omega) = correctMeasurementsBySensorPose(acc, omega, D_correctedAcc_acc, D_correctedAcc_omega, D_correctedOmega_omega); // Do update diff --git a/gtsam/navigation/tests/testGPSFactor.cpp b/gtsam/navigation/tests/testGPSFactor.cpp index 741ec4dfd..c3c6bb2d2 100644 --- a/gtsam/navigation/tests/testGPSFactor.cpp +++ b/gtsam/navigation/tests/testGPSFactor.cpp @@ -125,7 +125,7 @@ TEST(GPSData, init) { // Estimate initial state Pose3 T; Vector3 nV; - boost::tie(T, nV) = GPSFactor::EstimateState(t1, NED1, t2, NED2, 84831.0796); + std::tie(T, nV) = GPSFactor::EstimateState(t1, NED1, t2, NED2, 84831.0796); // Check values values EXPECT(assert_equal((Vector )Vector3(29.9575, -29.0564, -1.95993), nV, 1e-4)); diff --git a/gtsam/nonlinear/Expression-inl.h b/gtsam/nonlinear/Expression-inl.h index 35eef342c..c4899c66b 100644 --- a/gtsam/nonlinear/Expression-inl.h +++ b/gtsam/nonlinear/Expression-inl.h @@ -143,7 +143,7 @@ T Expression::value(const Values& values, // Call private version that returns derivatives in H KeyVector keys; FastVector dims; - boost::tie(keys, dims) = keysAndDims(); + std::tie(keys, dims) = keysAndDims(); return valueAndDerivatives(values, keys, dims, *H); } else // no derivatives needed, just return value diff --git a/gtsam/nonlinear/ExpressionFactor.h b/gtsam/nonlinear/ExpressionFactor.h index 8c4e8ce86..ada4609b9 100644 --- a/gtsam/nonlinear/ExpressionFactor.h +++ b/gtsam/nonlinear/ExpressionFactor.h @@ -180,7 +180,7 @@ protected: if (keys_.empty()) { // This is the case when called in ExpressionFactor Constructor. // We then take the keys from the expression in sorted order. - boost::tie(keys_, dims_) = expression_.keysAndDims(); + std::tie(keys_, dims_) = expression_.keysAndDims(); } else { // This happens with classes derived from BinaryExpressionFactor etc. 
// In that case, the keys_ are already defined and we just need to grab diff --git a/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.cpp b/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.cpp index 54db42b79..fbf8062f6 100644 --- a/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.cpp +++ b/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.cpp @@ -65,7 +65,7 @@ NonlinearConjugateGradientOptimizer::System::State NonlinearConjugateGradientOpt GaussianFactorGraph::shared_ptr NonlinearConjugateGradientOptimizer::iterate() { Values newValues; int dummy; - boost::tie(newValues, dummy) = nonlinearConjugateGradient( + std::tie(newValues, dummy) = nonlinearConjugateGradient( System(graph_), state_->values, params_, true /* single iteration */); state_.reset(new State(newValues, graph_.error(newValues), state_->iterations + 1)); @@ -78,7 +78,7 @@ const Values& NonlinearConjugateGradientOptimizer::optimize() { System system(graph_); Values newValues; int iterations; - boost::tie(newValues, iterations) = + std::tie(newValues, iterations) = nonlinearConjugateGradient(system, state_->values, params_, false); state_.reset(new State(std::move(newValues), graph_.error(newValues), iterations)); return state_->values; diff --git a/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h b/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h index 780d41635..8f73fc355 100644 --- a/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h +++ b/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h @@ -145,7 +145,7 @@ double lineSearch(const S &system, const V currentValues, const W &gradient) { * The last parameter is a switch between gradient-descent and conjugate gradient */ template -boost::tuple nonlinearConjugateGradient(const S &system, +std::tuple nonlinearConjugateGradient(const S &system, const V &initial, const NonlinearOptimizerParams ¶ms, const bool singleIteration, const bool gradientDescent = false) { @@ -160,7 +160,7 @@ boost::tuple nonlinearConjugateGradient(const S &system, std::cout << "Exiting, as error = " << currentError << " < " << params.errorTol << std::endl; } - return boost::tie(initial, iteration); + return std::tie(initial, iteration); } V currentValues = initial; @@ -218,7 +218,7 @@ boost::tuple nonlinearConjugateGradient(const S &system, << "nonlinearConjugateGradient: Terminating because reached maximum iterations" << std::endl; - return boost::tie(currentValues, iteration); + return std::tie(currentValues, iteration); } } // \ namespace gtsam diff --git a/gtsam/slam/JacobianFactorQR.h b/gtsam/slam/JacobianFactorQR.h index 5d59f3c62..a10dae1a0 100644 --- a/gtsam/slam/JacobianFactorQR.h +++ b/gtsam/slam/JacobianFactorQR.h @@ -48,7 +48,7 @@ public: GaussianFactorGraph::shared_ptr fg; KeyVector variables; variables.push_back(pointKey); - boost::tie(bn, fg) = gfg.eliminatePartialSequential(variables, EliminateQR); + std::tie(bn, fg) = gfg.eliminatePartialSequential(variables, EliminateQR); //fg->print("fg"); JacobianFactor::operator=(JacobianFactor(*fg)); diff --git a/gtsam/slam/tests/testDataset.cpp b/gtsam/slam/tests/testDataset.cpp index 9413ae4bd..af4d7612f 100644 --- a/gtsam/slam/tests/testDataset.cpp +++ b/gtsam/slam/tests/testDataset.cpp @@ -94,7 +94,7 @@ TEST(dataSet, load2D) { const string filename = findExampleDataFile("w100.graph"); NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; - boost::tie(graph, initial) = load2D(filename); + std::tie(graph, initial) = load2D(filename); EXPECT_LONGS_EQUAL(300, graph->size()); EXPECT_LONGS_EQUAL(100, 
initial->size()); auto model = noiseModel::Unit::Create(3); @@ -139,13 +139,13 @@ TEST(dataSet, load2DVictoriaPark) { Values::shared_ptr initial; // Load all - boost::tie(graph, initial) = load2D(filename); + std::tie(graph, initial) = load2D(filename); EXPECT_LONGS_EQUAL(10608, graph->size()); EXPECT_LONGS_EQUAL(7120, initial->size()); // Restrict keys size_t maxIndex = 5; - boost::tie(graph, initial) = load2D(filename, nullptr, maxIndex); + std::tie(graph, initial) = load2D(filename, nullptr, maxIndex); EXPECT_LONGS_EQUAL(5, graph->size()); EXPECT_LONGS_EQUAL(6, initial->size()); // file has 0 as well EXPECT_LONGS_EQUAL(L(5), graph->at(4)->keys()[1]); @@ -221,7 +221,7 @@ TEST(dataSet, readG2o3D) { NonlinearFactorGraph::shared_ptr actualGraph; Values::shared_ptr actualValues; bool is3D = true; - boost::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D); + std::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D); EXPECT(assert_equal(expectedGraph, *actualGraph, 1e-5)); for (size_t j : {0, 1, 2, 3, 4}) { EXPECT(assert_equal(poses[j], actualValues->at(j), 1e-5)); @@ -235,7 +235,7 @@ TEST( dataSet, readG2o3DNonDiagonalNoise) NonlinearFactorGraph::shared_ptr actualGraph; Values::shared_ptr actualValues; bool is3D = true; - boost::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D); + std::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D); Values expectedValues; Rot3 R0 = Rot3::Quaternion(1.000000, 0.000000, 0.000000, 0.000000 ); @@ -329,7 +329,7 @@ TEST(dataSet, readG2o) { const string g2oFile = findExampleDataFile("pose2example"); NonlinearFactorGraph::shared_ptr actualGraph; Values::shared_ptr actualValues; - boost::tie(actualGraph, actualValues) = readG2o(g2oFile); + std::tie(actualGraph, actualValues) = readG2o(g2oFile); auto model = noiseModel::Diagonal::Precisions( Vector3(44.721360, 44.721360, 30.901699)); @@ -356,7 +356,7 @@ TEST(dataSet, readG2oHuber) { NonlinearFactorGraph::shared_ptr actualGraph; Values::shared_ptr actualValues; bool is3D = false; - boost::tie(actualGraph, actualValues) = + std::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D, KernelFunctionTypeHUBER); auto baseModel = noiseModel::Diagonal::Precisions( @@ -373,7 +373,7 @@ TEST(dataSet, readG2oTukey) { NonlinearFactorGraph::shared_ptr actualGraph; Values::shared_ptr actualValues; bool is3D = false; - boost::tie(actualGraph, actualValues) = + std::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D, KernelFunctionTypeTUKEY); auto baseModel = noiseModel::Diagonal::Precisions( @@ -390,14 +390,14 @@ TEST( dataSet, writeG2o) const string g2oFile = findExampleDataFile("pose2example"); NonlinearFactorGraph::shared_ptr expectedGraph; Values::shared_ptr expectedValues; - boost::tie(expectedGraph, expectedValues) = readG2o(g2oFile); + std::tie(expectedGraph, expectedValues) = readG2o(g2oFile); const string filenameToWrite = createRewrittenFileName(g2oFile); writeG2o(*expectedGraph, *expectedValues, filenameToWrite); NonlinearFactorGraph::shared_ptr actualGraph; Values::shared_ptr actualValues; - boost::tie(actualGraph, actualValues) = readG2o(filenameToWrite); + std::tie(actualGraph, actualValues) = readG2o(filenameToWrite); EXPECT(assert_equal(*expectedValues,*actualValues,1e-5)); EXPECT(assert_equal(*expectedGraph,*actualGraph,1e-5)); } @@ -409,14 +409,14 @@ TEST( dataSet, writeG2o3D) NonlinearFactorGraph::shared_ptr expectedGraph; Values::shared_ptr expectedValues; bool is3D = true; - boost::tie(expectedGraph, expectedValues) = readG2o(g2oFile, is3D); + std::tie(expectedGraph, expectedValues) = 
readG2o(g2oFile, is3D); const string filenameToWrite = createRewrittenFileName(g2oFile); writeG2o(*expectedGraph, *expectedValues, filenameToWrite); NonlinearFactorGraph::shared_ptr actualGraph; Values::shared_ptr actualValues; - boost::tie(actualGraph, actualValues) = readG2o(filenameToWrite, is3D); + std::tie(actualGraph, actualValues) = readG2o(filenameToWrite, is3D); EXPECT(assert_equal(*expectedValues,*actualValues,1e-4)); EXPECT(assert_equal(*expectedGraph,*actualGraph,1e-4)); } @@ -428,14 +428,14 @@ TEST( dataSet, writeG2o3DNonDiagonalNoise) NonlinearFactorGraph::shared_ptr expectedGraph; Values::shared_ptr expectedValues; bool is3D = true; - boost::tie(expectedGraph, expectedValues) = readG2o(g2oFile, is3D); + std::tie(expectedGraph, expectedValues) = readG2o(g2oFile, is3D); const string filenameToWrite = createRewrittenFileName(g2oFile); writeG2o(*expectedGraph, *expectedValues, filenameToWrite); NonlinearFactorGraph::shared_ptr actualGraph; Values::shared_ptr actualValues; - boost::tie(actualGraph, actualValues) = readG2o(filenameToWrite, is3D); + std::tie(actualGraph, actualValues) = readG2o(filenameToWrite, is3D); EXPECT(assert_equal(*expectedValues,*actualValues,1e-4)); EXPECT(assert_equal(*expectedGraph,*actualGraph,1e-4)); } diff --git a/gtsam/slam/tests/testInitializePose.cpp b/gtsam/slam/tests/testInitializePose.cpp index 1373cb576..ec53effc7 100644 --- a/gtsam/slam/tests/testInitializePose.cpp +++ b/gtsam/slam/tests/testInitializePose.cpp @@ -36,7 +36,7 @@ TEST(InitializePose3, computePoses2D) { NonlinearFactorGraph::shared_ptr inputGraph; Values::shared_ptr posesInFile; bool is3D = false; - boost::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D); + std::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D); auto priorModel = noiseModel::Unit::Create(3); inputGraph->addPrior(0, posesInFile->at(0), priorModel); @@ -59,7 +59,7 @@ TEST(InitializePose3, computePoses3D) { NonlinearFactorGraph::shared_ptr inputGraph; Values::shared_ptr posesInFile; bool is3D = true; - boost::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D); + std::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D); auto priorModel = noiseModel::Unit::Create(6); inputGraph->addPrior(0, posesInFile->at(0), priorModel); diff --git a/gtsam/slam/tests/testInitializePose3.cpp b/gtsam/slam/tests/testInitializePose3.cpp index 37d65320a..37997ab0e 100644 --- a/gtsam/slam/tests/testInitializePose3.cpp +++ b/gtsam/slam/tests/testInitializePose3.cpp @@ -234,7 +234,7 @@ TEST( InitializePose3, orientationsGradient ) { NonlinearFactorGraph::shared_ptr matlabGraph; Values::shared_ptr matlabValues; bool is3D = true; - boost::tie(matlabGraph, matlabValues) = readG2o(matlabResultsfile, is3D); + std::tie(matlabGraph, matlabValues) = readG2o(matlabResultsfile, is3D); Rot3 R0Expected = matlabValues->at(1).rotation(); EXPECT(assert_equal(R0Expected, orientations.at(x0), 1e-4)); @@ -269,7 +269,7 @@ TEST(InitializePose3, initializePoses) { NonlinearFactorGraph::shared_ptr inputGraph; Values::shared_ptr posesInFile; bool is3D = true; - boost::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D); + std::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D); auto priorModel = noiseModel::Unit::Create(6); inputGraph->addPrior(0, Pose3(), priorModel); diff --git a/gtsam/slam/tests/testLago.cpp b/gtsam/slam/tests/testLago.cpp index fda39c169..43049994b 100644 --- a/gtsam/slam/tests/testLago.cpp +++ b/gtsam/slam/tests/testLago.cpp @@ -260,7 +260,7 @@ TEST( Lago, largeGraphNoisy_orientations ) { string inputFile = 
findExampleDataFile("noisyToyGraph"); NonlinearFactorGraph::shared_ptr g; Values::shared_ptr initial; - boost::tie(g, initial) = readG2o(inputFile); + std::tie(g, initial) = readG2o(inputFile); // Add prior on the pose having index (key) = 0 NonlinearFactorGraph graphWithPrior = *g; @@ -281,7 +281,7 @@ TEST( Lago, largeGraphNoisy_orientations ) { string matlabFile = findExampleDataFile("orientationsNoisyToyGraph"); NonlinearFactorGraph::shared_ptr gmatlab; Values::shared_ptr expected; - boost::tie(gmatlab, expected) = readG2o(matlabFile); + std::tie(gmatlab, expected) = readG2o(matlabFile); for(const auto& key_pose: expected->extract()){ const Key& k = key_pose.first; @@ -296,7 +296,7 @@ TEST( Lago, largeGraphNoisy ) { string inputFile = findExampleDataFile("noisyToyGraph"); NonlinearFactorGraph::shared_ptr g; Values::shared_ptr initial; - boost::tie(g, initial) = readG2o(inputFile); + std::tie(g, initial) = readG2o(inputFile); // Add prior on the pose having index (key) = 0 NonlinearFactorGraph graphWithPrior = *g; @@ -308,7 +308,7 @@ TEST( Lago, largeGraphNoisy ) { string matlabFile = findExampleDataFile("optimizedNoisyToyGraph"); NonlinearFactorGraph::shared_ptr gmatlab; Values::shared_ptr expected; - boost::tie(gmatlab, expected) = readG2o(matlabFile); + std::tie(gmatlab, expected) = readG2o(matlabFile); for(const auto& key_pose: expected->extract()){ const Key& k = key_pose.first; diff --git a/gtsam/symbolic/tests/testSymbolicFactor.cpp b/gtsam/symbolic/tests/testSymbolicFactor.cpp index 646b80ad6..38a00e0c8 100644 --- a/gtsam/symbolic/tests/testSymbolicFactor.cpp +++ b/gtsam/symbolic/tests/testSymbolicFactor.cpp @@ -77,7 +77,7 @@ TEST(SymbolicFactor, EliminateSymbolic) SymbolicFactor::shared_ptr actualFactor; SymbolicConditional::shared_ptr actualConditional; - boost::tie(actualConditional, actualFactor) = + std::tie(actualConditional, actualFactor) = EliminateSymbolic(factors, Ordering{0, 1, 2, 3}); CHECK(assert_equal(expectedConditional, *actualConditional)); diff --git a/gtsam/symbolic/tests/testSymbolicFactorGraph.cpp b/gtsam/symbolic/tests/testSymbolicFactorGraph.cpp index 6c8924be4..96d22c8c4 100644 --- a/gtsam/symbolic/tests/testSymbolicFactorGraph.cpp +++ b/gtsam/symbolic/tests/testSymbolicFactorGraph.cpp @@ -67,7 +67,7 @@ TEST(SymbolicFactorGraph, eliminatePartialSequential) { SymbolicBayesNet::shared_ptr actualBayesNet; SymbolicFactorGraph::shared_ptr actualSfg; - boost::tie(actualBayesNet, actualSfg) = + std::tie(actualBayesNet, actualSfg) = simpleTestGraph2.eliminatePartialSequential(Ordering{0, 1}); EXPECT(assert_equal(expectedSfg, *actualSfg)); @@ -75,7 +75,7 @@ TEST(SymbolicFactorGraph, eliminatePartialSequential) { SymbolicBayesNet::shared_ptr actualBayesNet2; SymbolicFactorGraph::shared_ptr actualSfg2; - boost::tie(actualBayesNet2, actualSfg2) = + std::tie(actualBayesNet2, actualSfg2) = simpleTestGraph2.eliminatePartialSequential(Ordering{0, 1}); EXPECT(assert_equal(expectedSfg, *actualSfg2)); @@ -108,7 +108,7 @@ TEST(SymbolicFactorGraph, eliminatePartialMultifrontal) { SymbolicBayesTree::shared_ptr actualBayesTree; SymbolicFactorGraph::shared_ptr actualFactorGraph; - boost::tie(actualBayesTree, actualFactorGraph) = + std::tie(actualBayesTree, actualFactorGraph) = simpleTestGraph2.eliminatePartialMultifrontal(Ordering{4, 5}); EXPECT(assert_equal(expectedFactorGraph, *actualFactorGraph)); @@ -124,7 +124,7 @@ TEST(SymbolicFactorGraph, eliminatePartialMultifrontal) { SymbolicBayesTree::shared_ptr actualBayesTree2; SymbolicFactorGraph::shared_ptr actualFactorGraph2; - 
boost::tie(actualBayesTree2, actualFactorGraph2) = + std::tie(actualBayesTree2, actualFactorGraph2) = simpleTestGraph2.eliminatePartialMultifrontal(KeyVector{4, 5}); EXPECT(assert_equal(expectedFactorGraph, *actualFactorGraph2)); diff --git a/gtsam_unstable/discrete/tests/testLoopyBelief.cpp b/gtsam_unstable/discrete/tests/testLoopyBelief.cpp index 63df7d7c4..a34efe665 100644 --- a/gtsam_unstable/discrete/tests/testLoopyBelief.cpp +++ b/gtsam_unstable/discrete/tests/testLoopyBelief.cpp @@ -102,7 +102,7 @@ class LoopyBelief { } if (debug) subGraph.print("------- Subgraph:"); DiscreteFactor::shared_ptr message; - boost::tie(dummyCond, message) = + std::tie(dummyCond, message) = EliminateDiscrete(subGraph, Ordering{neighbor}); // store the new factor into messages messages.insert(make_pair(neighbor, message)); diff --git a/gtsam_unstable/examples/SmartRangeExample_plaza1.cpp b/gtsam_unstable/examples/SmartRangeExample_plaza1.cpp index 37becfef1..641855da0 100644 --- a/gtsam_unstable/examples/SmartRangeExample_plaza1.cpp +++ b/gtsam_unstable/examples/SmartRangeExample_plaza1.cpp @@ -77,7 +77,7 @@ list readOdometry() { // load the ranges from TD // Time (sec) Sender / Antenna ID Receiver Node ID Range (m) -typedef boost::tuple RangeTriple; +typedef std::tuple RangeTriple; vector readTriples() { vector triples; string tdFile = findExampleDataFile("Plaza1_TD.txt"); @@ -165,7 +165,7 @@ int main(int argc, char** argv) { //--------------------------------- odometry loop ----------------------------------------- double t; Pose2 odometry; - boost::tie(t, odometry) = timedOdometry; + std::tie(t, odometry) = timedOdometry; printf("step %d, time = %g\n",(int)i,t); // add odometry factor diff --git a/gtsam_unstable/examples/SmartRangeExample_plaza2.cpp b/gtsam_unstable/examples/SmartRangeExample_plaza2.cpp index 1e6f77b31..1c403f142 100644 --- a/gtsam_unstable/examples/SmartRangeExample_plaza2.cpp +++ b/gtsam_unstable/examples/SmartRangeExample_plaza2.cpp @@ -76,7 +76,7 @@ list readOdometry() { // load the ranges from TD // Time (sec) Sender / Antenna ID Receiver Node ID Range (m) -typedef boost::tuple RangeTriple; +typedef std::tuple RangeTriple; vector readTriples() { vector triples; string tdFile = findExampleDataFile("Plaza2_TD.txt"); @@ -146,7 +146,7 @@ int main(int argc, char** argv) { //--------------------------------- odometry loop ----------------------------------------- double t; Pose2 odometry; - boost::tie(t, odometry) = timedOdometry; + std::tie(t, odometry) = timedOdometry; // add odometry factor newFactors.push_back( diff --git a/gtsam_unstable/geometry/tests/testInvDepthCamera3.cpp b/gtsam_unstable/geometry/tests/testInvDepthCamera3.cpp index 11a24286a..9c314b0f0 100644 --- a/gtsam_unstable/geometry/tests/testInvDepthCamera3.cpp +++ b/gtsam_unstable/geometry/tests/testInvDepthCamera3.cpp @@ -132,7 +132,7 @@ TEST(InvDepthFactor, backproject) Vector5 actual_vec; double actual_inv; - boost::tie(actual_vec, actual_inv) = inv_camera.backproject(z, 4); + std::tie(actual_vec, actual_inv) = inv_camera.backproject(z, 4); EXPECT(assert_equal(expected,actual_vec,1e-7)); EXPECT_DOUBLES_EQUAL(inv_depth,actual_inv,1e-7); } @@ -148,7 +148,7 @@ TEST(InvDepthFactor, backproject2) Vector5 actual_vec; double actual_inv; - boost::tie(actual_vec, actual_inv) = inv_camera.backproject(z, 10); + std::tie(actual_vec, actual_inv) = inv_camera.backproject(z, 10); EXPECT(assert_equal(expected,actual_vec,1e-7)); EXPECT_DOUBLES_EQUAL(inv_depth,actual_inv,1e-7); } diff --git 
diff --git a/gtsam_unstable/linear/ActiveSetSolver-inl.h b/gtsam_unstable/linear/ActiveSetSolver-inl.h
index cba639e5d..eeb3cd041 100644
--- a/gtsam_unstable/linear/ActiveSetSolver-inl.h
+++ b/gtsam_unstable/linear/ActiveSetSolver-inl.h
@@ -45,7 +45,7 @@ namespace gtsam {
  *
  * We want the minimum of all those alphas among all inactive inequality.
  */
-Template boost::tuple<double, int> This::computeStepSize(
+Template std::tuple<double, int> This::computeStepSize(
     const InequalityFactorGraph& workingSet, const VectorValues& xk,
     const VectorValues& p, const double& maxAlpha) const {
   double minAlpha = maxAlpha;
@@ -74,7 +74,7 @@ Template boost::tuple<double, int> This::computeStepSize(
       }
     }
   }
-  return boost::make_tuple(minAlpha, closestFactorIx);
+  return std::make_tuple(minAlpha, closestFactorIx);
 }
 /******************************************************************************/
@@ -222,7 +222,7 @@ Template typename This::State This::iterate(
   double alpha;
   int factorIx;
   VectorValues p = newValues - state.values;
-  boost::tie(alpha, factorIx) = // using 16.41
+  std::tie(alpha, factorIx) = // using 16.41
      computeStepSize(state.workingSet, state.values, p, POLICY::maxAlpha);
   // also add to the working set the one that complains the most
   InequalityFactorGraph newWorkingSet = state.workingSet;
diff --git a/gtsam_unstable/linear/ActiveSetSolver.h b/gtsam_unstable/linear/ActiveSetSolver.h
index ad8367c06..21d01f902 100644
--- a/gtsam_unstable/linear/ActiveSetSolver.h
+++ b/gtsam_unstable/linear/ActiveSetSolver.h
@@ -114,7 +114,7 @@ protected:
   * If there is a blocking constraint, the closest one will be added to the
   * working set and become active in the next iteration.
   */
-  boost::tuple<double, int> computeStepSize(
+  std::tuple<double, int> computeStepSize(
      const InequalityFactorGraph& workingSet, const VectorValues& xk,
      const VectorValues& p, const double& maxAlpha) const;
diff --git a/gtsam_unstable/linear/tests/testLPSolver.cpp b/gtsam_unstable/linear/tests/testLPSolver.cpp
index 53c8c7618..dc55183cf 100644
--- a/gtsam_unstable/linear/tests/testLPSolver.cpp
+++ b/gtsam_unstable/linear/tests/testLPSolver.cpp
@@ -73,7 +73,7 @@ TEST(LPInitSolver, InfiniteLoopSingleVar) {
   VectorValues starter;
   starter.insert(1, Vector3(0, 0, 2));
   VectorValues results, duals;
-  boost::tie(results, duals) = solver.optimize(starter);
+  std::tie(results, duals) = solver.optimize(starter);
   VectorValues expected;
   expected.insert(1, Vector3(13.5, 6.5, -6.5));
   CHECK(assert_equal(results, expected, 1e-7));
@@ -101,7 +101,7 @@ TEST(LPInitSolver, InfiniteLoopMultiVar) {
   starter.insert(Y, kZero);
   starter.insert(Z, Vector::Constant(1, 2.0));
   VectorValues results, duals;
-  boost::tie(results, duals) = solver.optimize(starter);
+  std::tie(results, duals) = solver.optimize(starter);
   VectorValues expected;
   expected.insert(X, Vector::Constant(1, 13.5));
   expected.insert(Y, Vector::Constant(1, 6.5));
@@ -201,7 +201,7 @@ TEST(LPSolver, SimpleTest1) {
   CHECK(assert_equal(expected_x1, x1, 1e-10));
   VectorValues result, duals;
-  boost::tie(result, duals) = lpSolver.optimize(init);
+  std::tie(result, duals) = lpSolver.optimize(init);
   VectorValues expectedResult;
   expectedResult.insert(1, Vector2(8. / 3., 2. / 3.));
   CHECK(assert_equal(expectedResult, result, 1e-10));
@@ -213,7 +213,7 @@ TEST(LPSolver, TestWithoutInitialValues) {
   LPSolver lpSolver(lp);
   VectorValues result, duals, expectedResult;
   expectedResult.insert(1, Vector2(8. / 3., 2. / 3.));
-  boost::tie(result, duals) = lpSolver.optimize();
+  std::tie(result, duals) = lpSolver.optimize();
   CHECK(assert_equal(expectedResult, result));
 }
diff --git a/gtsam_unstable/linear/tests/testLinearEquality.cpp b/gtsam_unstable/linear/tests/testLinearEquality.cpp
index 58af66a09..db9aefe18 100644
--- a/gtsam_unstable/linear/tests/testLinearEquality.cpp
+++ b/gtsam_unstable/linear/tests/testLinearEquality.cpp
@@ -204,7 +204,7 @@ TEST(LinearEquality, operators) {
   // test gradient at zero
   Matrix A;
   Vector b2;
-  boost::tie(A, b2) = lf.jacobian();
+  std::tie(A, b2) = lf.jacobian();
   VectorValues expectedG;
   expectedG.insert(1, (Vector(2) << 0.2, -0.1).finished());
   expectedG.insert(2, (Vector(2) << -0.2, 0.1).finished());
diff --git a/gtsam_unstable/partition/FindSeparator-inl.h b/gtsam_unstable/partition/FindSeparator-inl.h
index 45868e78e..c87fbbd4d 100644
--- a/gtsam_unstable/partition/FindSeparator-inl.h
+++ b/gtsam_unstable/partition/FindSeparator-inl.h
@@ -252,7 +252,7 @@ namespace gtsam { namespace partition {
     // run ND on the graph
     size_t sepsize;
     sharedInts part;
-    boost::tie(sepsize, part) = separatorMetis(numKeys, xadj, adjncy, adjwgt, verbose);
+    std::tie(sepsize, part) = separatorMetis(numKeys, xadj, adjncy, adjwgt, verbose);
     if (!sepsize) return std::optional();
     // convert the 0-1-2 from Metis to 1-2-0, so that the separator is 0, as later
@@ -312,7 +312,7 @@ namespace gtsam { namespace partition {
     // run metis on the graph
     int edgecut;
     sharedInts part;
-    boost::tie(edgecut, part) = edgeMetis(numKeys, xadj, adjncy, adjwgt, verbose);
+    std::tie(edgecut, part) = edgeMetis(numKeys, xadj, adjncy, adjwgt, verbose);
     // convert the 0-1-2 from Metis to 1-2-0, so that the separator is 0, as later we will have more submaps
     MetisResult result;
diff --git a/gtsam_unstable/partition/NestedDissection-inl.h b/gtsam_unstable/partition/NestedDissection-inl.h
index c377998dd..7dbf45cdc 100644
--- a/gtsam_unstable/partition/NestedDissection-inl.h
+++ b/gtsam_unstable/partition/NestedDissection-inl.h
@@ -22,7 +22,7 @@ namespace gtsam { namespace partition {
   fg_(fg), ordering_(ordering){
     GenericUnaryGraph unaryFactors;
     GenericGraph gfg;
-    boost::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering);
+    std::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering);
     // build reverse mapping from integer to symbol
     int numNodes = ordering.size();
@@ -46,7 +46,7 @@ namespace gtsam { namespace partition {
     const NLG& fg, const Ordering& ordering, const std::shared_ptr& cuts, const bool verbose) : fg_(fg), ordering_(ordering){
     GenericUnaryGraph unaryFactors;
     GenericGraph gfg;
-    boost::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering);
+    std::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering);
     // build reverse mapping from integer to symbol
     int numNodes = ordering.size();
diff --git a/gtsam_unstable/slam/tests/testAHRS.cpp b/gtsam_unstable/slam/tests/testAHRS.cpp
index d0980f452..a41732f57 100644
--- a/gtsam_unstable/slam/tests/testAHRS.cpp
+++ b/gtsam_unstable/slam/tests/testAHRS.cpp
@@ -74,7 +74,7 @@ TEST (AHRS, Mechanization_integrate) {
   AHRS ahrs = AHRS(stationaryU,stationaryF,g_e);
   Mechanization_bRn2 mech;
   KalmanFilter::State state;
-//  boost::tie(mech,state) = ahrs.initialize(g_e);
+//  std::tie(mech,state) = ahrs.initialize(g_e);
 //  Vector u = Vector3(0.05,0.0,0.0);
 //  double dt = 2;
 //  Rot3 expected;
diff --git a/tests/testExpressionFactor.cpp b/tests/testExpressionFactor.cpp
index 567e9df5d..cc0ca5d4e 100644
--- a/tests/testExpressionFactor.cpp
+++ b/tests/testExpressionFactor.cpp
@@ -235,7 +235,7 @@ TEST(ExpressionFactor, Shallow) {
   // Get and check keys and dims
   KeyVector keys;
   FastVector<int> dims;
-  boost::tie(keys, dims) = expression.keysAndDims();
+  std::tie(keys, dims) = expression.keysAndDims();
   LONGS_EQUAL(2,keys.size());
   LONGS_EQUAL(2,dims.size());
   LONGS_EQUAL(1,keys[0]);
diff --git a/tests/testGaussianFactorGraphB.cpp b/tests/testGaussianFactorGraphB.cpp
index a6aa7bfc5..19a3567e8 100644
--- a/tests/testGaussianFactorGraphB.cpp
+++ b/tests/testGaussianFactorGraphB.cpp
@@ -117,7 +117,7 @@ TEST(GaussianFactorGraph, eliminateOne_x1_fast) {
   GaussianFactorGraph fg = createGaussianFactorGraph();
   GaussianConditional::shared_ptr conditional;
   JacobianFactor::shared_ptr remaining;
-  boost::tie(conditional, remaining) = EliminateQR(fg, Ordering{X(1)});
+  std::tie(conditional, remaining) = EliminateQR(fg, Ordering{X(1)});
   // create expected Conditional Gaussian
   Matrix I = 15 * I_2x2, R11 = I, S12 = -0.111111 * I, S13 = -0.444444 * I;
@@ -295,7 +295,7 @@ TEST(GaussianFactorGraph, elimination) {
   // Check matrix
   Matrix R;
   Vector d;
-  boost::tie(R, d) = bayesNet.matrix();
+  std::tie(R, d) = bayesNet.matrix();
   Matrix expected = (Matrix(2, 2) << 0.707107, -0.353553, 0.0, 0.612372).finished();
   Matrix expected2 =
diff --git a/tests/testGaussianJunctionTreeB.cpp b/tests/testGaussianJunctionTreeB.cpp
index 2740babd0..a19eb17df 100644
--- a/tests/testGaussianJunctionTreeB.cpp
+++ b/tests/testGaussianJunctionTreeB.cpp
@@ -67,7 +67,7 @@ TEST( GaussianJunctionTreeB, constructor2 ) {
   // create a graph
   NonlinearFactorGraph nlfg;
   Values values;
-  boost::tie(nlfg, values) = createNonlinearSmoother(7);
+  std::tie(nlfg, values) = createNonlinearSmoother(7);
   SymbolicFactorGraph::shared_ptr symbolic = nlfg.symbolic();
   // linearize
@@ -130,7 +130,7 @@ TEST( GaussianJunctionTreeB, constructor2 ) {
 //  // create a graph
 //  GaussianFactorGraph fg;
 //  Ordering ordering;
-//  boost::tie(fg,ordering) = createSmoother(7);
+//  std::tie(fg,ordering) = createSmoother(7);
 //
 //  // optimize the graph
 //  GaussianJunctionTree tree(fg);
diff --git a/tests/testGncOptimizer.cpp b/tests/testGncOptimizer.cpp
index 15d660114..5670c55b0 100644
--- a/tests/testGncOptimizer.cpp
+++ b/tests/testGncOptimizer.cpp
@@ -739,7 +739,7 @@ TEST(GncOptimizer, optimizeSmallPoseGraph) {
   const string filename = findExampleDataFile("w100.graph");
   NonlinearFactorGraph::shared_ptr graph;
   Values::shared_ptr initial;
-  boost::tie(graph, initial) = load2D(filename);
+  std::tie(graph, initial) = load2D(filename);
   // Add a Gaussian prior on first poses
   Pose2 priorMean(0.0, 0.0, 0.0); // prior at origin
   SharedDiagonal priorNoise = noiseModel::Diagonal::Sigmas(
diff --git a/tests/testGradientDescentOptimizer.cpp b/tests/testGradientDescentOptimizer.cpp
index 83b0f9e8a..fd03b6304 100644
--- a/tests/testGradientDescentOptimizer.cpp
+++ b/tests/testGradientDescentOptimizer.cpp
@@ -26,7 +26,7 @@ using namespace std;
 using namespace gtsam;
 // Generate a small PoseSLAM problem
-boost::tuple<NonlinearFactorGraph, Values> generateProblem() {
+std::tuple<NonlinearFactorGraph, Values> generateProblem() {
   // 1. Create graph container and add factors to it
   NonlinearFactorGraph graph;
@@ -64,7 +64,7 @@ boost::tuple<NonlinearFactorGraph, Values> generateProblem() {
   Pose2 x5(2.1, 2.1, -M_PI_2);
   initialEstimate.insert(5, x5);
-  return boost::tie(graph, initialEstimate);
+  return std::tie(graph, initialEstimate);
 }
 /* ************************************************************************* */
@@ -73,7 +73,7 @@ TEST(NonlinearConjugateGradientOptimizer, Optimize) {
   NonlinearFactorGraph graph;
   Values initialEstimate;
-  boost::tie(graph, initialEstimate) = generateProblem();
+  std::tie(graph, initialEstimate) = generateProblem();
   // cout << "initial error = " << graph.error(initialEstimate) << endl;
   NonlinearOptimizerParams param;
diff --git a/tests/testGraph.cpp b/tests/testGraph.cpp
index 0d2a25fbd..92b2c56c9 100644
--- a/tests/testGraph.cpp
+++ b/tests/testGraph.cpp
@@ -68,7 +68,7 @@ TEST( Graph, predecessorMap2Graph )
   p_map.insert(1, 2);
   p_map.insert(2, 2);
   p_map.insert(3, 2);
-  boost::tie(graph, root, key2vertex) = predecessorMap2Graph<SGraph<Key>, SVertex, Key>(p_map);
+  std::tie(graph, root, key2vertex) = predecessorMap2Graph<SGraph<Key>, SVertex, Key>(p_map);
   LONGS_EQUAL(3, (long)boost::num_vertices(graph));
   CHECK(root == key2vertex[2]);
@@ -174,7 +174,7 @@ TEST( GaussianFactorGraph, findMinimumSpanningTree )
 //  G.push_factor("x3", "x4");
 //
 //  SymbolicFactorGraph T, C;
-//  boost::tie(T, C) = G.splitMinimumSpanningTree();
+//  std::tie(T, C) = G.splitMinimumSpanningTree();
 //
 //  SymbolicFactorGraph expectedT, expectedC;
 //  expectedT.push_factor("x1", "x2");
@@ -207,7 +207,7 @@ TEST( GaussianFactorGraph, findMinimumSpanningTree )
 //
 //  SymbolicFactorGraph singletonGraph;
 //  set singletons;
-//  boost::tie(singletonGraph, singletons) = G.removeSingletons();
+//  std::tie(singletonGraph, singletons) = G.removeSingletons();
 //
 //  set singletons_excepted; singletons_excepted += "x1", "x2", "x5", "l1", "l3";
 //  CHECK(singletons_excepted == singletons);
diff --git a/tests/testIterative.cpp b/tests/testIterative.cpp
index 5519a7f04..5f981e2dd 100644
--- a/tests/testIterative.cpp
+++ b/tests/testIterative.cpp
@@ -62,7 +62,7 @@ TEST( Iterative, conjugateGradientDescent )
   Matrix A;
   Vector b;
   Vector x0 = Z_6x1;
-  boost::tie(A, b) = fg.jacobian();
+  std::tie(A, b) = fg.jacobian();
   Vector expectedX = (Vector(6) << -0.1, 0.1, -0.1, -0.1, 0.1, -0.2).finished();
   // Do conjugate gradient descent, System version
diff --git a/tests/testSubgraphPreconditioner.cpp b/tests/testSubgraphPreconditioner.cpp
index 8abd54795..5f43a0543 100644
--- a/tests/testSubgraphPreconditioner.cpp
+++ b/tests/testSubgraphPreconditioner.cpp
@@ -67,7 +67,7 @@ TEST(SubgraphPreconditioner, planarGraph) {
   // Check planar graph construction
   GaussianFactorGraph A;
   VectorValues xtrue;
-  boost::tie(A, xtrue) = planarGraph(3);
+  std::tie(A, xtrue) = planarGraph(3);
   LONGS_EQUAL(13, A.size());
   LONGS_EQUAL(9, xtrue.size());
   DOUBLES_EQUAL(0, error(A, xtrue), 1e-9); // check zero error for xtrue
@@ -83,11 +83,11 @@ TEST(SubgraphPreconditioner, splitOffPlanarTree) {
   // Build a planar graph
   GaussianFactorGraph A;
   VectorValues xtrue;
-  boost::tie(A, xtrue) = planarGraph(3);
+  std::tie(A, xtrue) = planarGraph(3);
   // Get the spanning tree and constraints, and check their sizes
   GaussianFactorGraph T, C;
-  boost::tie(T, C) = splitOffPlanarTree(3, A);
+  std::tie(T, C) = splitOffPlanarTree(3, A);
   LONGS_EQUAL(9, T.size());
   LONGS_EQUAL(4, C.size());
@@ -103,11 +103,11 @@ TEST(SubgraphPreconditioner, system) {
   GaussianFactorGraph Ab;
   VectorValues xtrue;
   size_t N = 3;
-  boost::tie(Ab, xtrue) = planarGraph(N); // A*x-b
+  std::tie(Ab, xtrue) = planarGraph(N); // A*x-b
   // Get the spanning tree and remaining graph
   GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2
-  boost::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab);
+  std::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab);
   // Eliminate the spanning tree to build a prior
   const Ordering ord = planarOrdering(N);
@@ -199,11 +199,11 @@ TEST(SubgraphPreconditioner, conjugateGradients) {
   GaussianFactorGraph Ab;
   VectorValues xtrue;
   size_t N = 3;
-  boost::tie(Ab, xtrue) = planarGraph(N); // A*x-b
+  std::tie(Ab, xtrue) = planarGraph(N); // A*x-b
   // Get the spanning tree
   GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2
-  boost::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab);
+  std::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab);
   // Eliminate the spanning tree to build a prior
   GaussianBayesNet Rc1 = *Ab1.eliminateSequential(); // R1*x-c1
diff --git a/timing/timeGaussianFactor.cpp b/timing/timeGaussianFactor.cpp
index feb738439..6994dba93 100644
--- a/timing/timeGaussianFactor.cpp
+++ b/timing/timeGaussianFactor.cpp
@@ -106,7 +106,7 @@ int main()
   JacobianFactor::shared_ptr factor;
   for(int i = 0; i < n; i++)
-    boost::tie(conditional, factor) =
+    std::tie(conditional, factor) =
       JacobianFactor(combined).eliminate(Ordering{_x2_});
   long timeLog2 = clock();
diff --git a/timing/timeGaussianFactorGraph.cpp b/timing/timeGaussianFactorGraph.cpp
index 574579f84..a5e9b6eed 100644
--- a/timing/timeGaussianFactorGraph.cpp
+++ b/timing/timeGaussianFactorGraph.cpp
@@ -63,7 +63,7 @@ double timePlanarSmootherEliminate(int N, bool old = true) {
 //double timePlanarSmootherJoinAug(int N, size_t reps) {
 //  GaussianFactorGraph fgBase;
 //  VectorValues config;
-//  boost::tie(fgBase,config) = planarGraph(N);
+//  std::tie(fgBase,config) = planarGraph(N);
 //  Ordering ordering = fgBase.getOrdering();
 //  Symbol key = ordering.front();
 //
@@ -96,7 +96,7 @@ double timePlanarSmootherEliminate(int N, bool old = true) {
 //double timePlanarSmootherCombined(int N, size_t reps) {
 //  GaussianFactorGraph fgBase;
 //  VectorValues config;
-//  boost::tie(fgBase,config) = planarGraph(N);
+//  std::tie(fgBase,config) = planarGraph(N);
 //  Ordering ordering = fgBase.getOrdering();
 //  Symbol key = ordering.front();
 //
diff --git a/timing/timeLago.cpp b/timing/timeLago.cpp
index bb506de36..d1e10bbe2 100644
--- a/timing/timeLago.cpp
+++ b/timing/timeLago.cpp
@@ -37,7 +37,7 @@ int main(int argc, char *argv[]) {
   NonlinearFactorGraph::shared_ptr g;
   string inputFile = findExampleDataFile("w10000");
   auto model = noiseModel::Diagonal::Sigmas((Vector(3) << 0.05, 0.05, 5.0 * M_PI / 180.0).finished());
-  boost::tie(g, solution) = load2D(inputFile, model);
+  std::tie(g, solution) = load2D(inputFile, model);
   // add noise to create initial estimate
   Values initial;