Merge pull request #1410 from kartikarcot/verdant/boost-tuples
commit
165d6b16b1
|
@ -48,16 +48,16 @@ int main(const int argc, const char *argv[]) {
|
|||
Values::shared_ptr initial;
|
||||
bool is3D = false;
|
||||
if (kernelType.compare("none") == 0) {
|
||||
boost::tie(graph, initial) = readG2o(g2oFile, is3D);
|
||||
std::tie(graph, initial) = readG2o(g2oFile, is3D);
|
||||
}
|
||||
if (kernelType.compare("huber") == 0) {
|
||||
std::cout << "Using robust kernel: huber " << std::endl;
|
||||
boost::tie(graph, initial) =
|
||||
std::tie(graph, initial) =
|
||||
readG2o(g2oFile, is3D, KernelFunctionTypeHUBER);
|
||||
}
|
||||
if (kernelType.compare("tukey") == 0) {
|
||||
std::cout << "Using robust kernel: tukey " << std::endl;
|
||||
boost::tie(graph, initial) =
|
||||
std::tie(graph, initial) =
|
||||
readG2o(g2oFile, is3D, KernelFunctionTypeTUKEY);
|
||||
}
|
||||
|
||||
|
@ -90,7 +90,7 @@ int main(const int argc, const char *argv[]) {
|
|||
std::cout << "Writing results to file: " << outputFile << std::endl;
|
||||
NonlinearFactorGraph::shared_ptr graphNoKernel;
|
||||
Values::shared_ptr initial2;
|
||||
boost::tie(graphNoKernel, initial2) = readG2o(g2oFile);
|
||||
std::tie(graphNoKernel, initial2) = readG2o(g2oFile);
|
||||
writeG2o(*graphNoKernel, result, outputFile);
|
||||
std::cout << "done! " << std::endl;
|
||||
}
|
||||
|
|
|
@ -36,7 +36,7 @@ int main (int argc, char** argv) {
|
|||
Values::shared_ptr initial;
|
||||
SharedDiagonal model = noiseModel::Diagonal::Sigmas((Vector(3) << 0.05, 0.05, 5.0 * M_PI / 180.0).finished());
|
||||
string graph_file = findExampleDataFile("w100.graph");
|
||||
boost::tie(graph, initial) = load2D(graph_file, model);
|
||||
std::tie(graph, initial) = load2D(graph_file, model);
|
||||
initial->print("Initial estimate:\n");
|
||||
|
||||
// Add a Gaussian prior on first poses
|
||||
|
|
|
@ -37,7 +37,7 @@ int main(const int argc, const char *argv[]) {
|
|||
|
||||
NonlinearFactorGraph::shared_ptr graph;
|
||||
Values::shared_ptr initial;
|
||||
boost::tie(graph, initial) = readG2o(g2oFile);
|
||||
std::tie(graph, initial) = readG2o(g2oFile);
|
||||
|
||||
// Add prior on the pose having index (key) = 0
|
||||
auto priorModel = noiseModel::Diagonal::Variances(Vector3(1e-6, 1e-6, 1e-8));
|
||||
|
@ -55,7 +55,7 @@ int main(const int argc, const char *argv[]) {
|
|||
std::cout << "Writing results to file: " << outputFile << std::endl;
|
||||
NonlinearFactorGraph::shared_ptr graphNoKernel;
|
||||
Values::shared_ptr initial2;
|
||||
boost::tie(graphNoKernel, initial2) = readG2o(g2oFile);
|
||||
std::tie(graphNoKernel, initial2) = readG2o(g2oFile);
|
||||
writeG2o(*graphNoKernel, estimateLago, outputFile);
|
||||
std::cout << "done! " << std::endl;
|
||||
}
|
||||
|
|
|
@ -37,7 +37,7 @@ int main(const int argc, const char* argv[]) {
|
|||
NonlinearFactorGraph::shared_ptr graph;
|
||||
Values::shared_ptr initial;
|
||||
bool is3D = true;
|
||||
boost::tie(graph, initial) = readG2o(g2oFile, is3D);
|
||||
std::tie(graph, initial) = readG2o(g2oFile, is3D);
|
||||
|
||||
// Add prior on the first key
|
||||
auto priorModel = noiseModel::Diagonal::Variances(
|
||||
|
@ -67,7 +67,7 @@ int main(const int argc, const char* argv[]) {
|
|||
std::cout << "Writing results to file: " << outputFile << std::endl;
|
||||
NonlinearFactorGraph::shared_ptr graphNoKernel;
|
||||
Values::shared_ptr initial2;
|
||||
boost::tie(graphNoKernel, initial2) = readG2o(g2oFile);
|
||||
std::tie(graphNoKernel, initial2) = readG2o(g2oFile);
|
||||
writeG2o(*graphNoKernel, result, outputFile);
|
||||
std::cout << "done! " << std::endl;
|
||||
}
|
||||
|
|
|
@ -36,7 +36,7 @@ int main(const int argc, const char *argv[]) {
|
|||
NonlinearFactorGraph::shared_ptr graph;
|
||||
Values::shared_ptr initial;
|
||||
bool is3D = true;
|
||||
boost::tie(graph, initial) = readG2o(g2oFile, is3D);
|
||||
std::tie(graph, initial) = readG2o(g2oFile, is3D);
|
||||
|
||||
bool add = false;
|
||||
Key firstKey = 8646911284551352320;
|
||||
|
|
|
@ -36,7 +36,7 @@ int main(const int argc, const char* argv[]) {
|
|||
NonlinearFactorGraph::shared_ptr graph;
|
||||
Values::shared_ptr initial;
|
||||
bool is3D = true;
|
||||
boost::tie(graph, initial) = readG2o(g2oFile, is3D);
|
||||
std::tie(graph, initial) = readG2o(g2oFile, is3D);
|
||||
|
||||
// Add prior on the first key
|
||||
auto priorModel = noiseModel::Diagonal::Variances(
|
||||
|
@ -66,7 +66,7 @@ int main(const int argc, const char* argv[]) {
|
|||
std::cout << "Writing results to file: " << outputFile << std::endl;
|
||||
NonlinearFactorGraph::shared_ptr graphNoKernel;
|
||||
Values::shared_ptr initial2;
|
||||
boost::tie(graphNoKernel, initial2) = readG2o(g2oFile);
|
||||
std::tie(graphNoKernel, initial2) = readG2o(g2oFile);
|
||||
writeG2o(*graphNoKernel, result, outputFile);
|
||||
std::cout << "done! " << std::endl;
|
||||
}
|
||||
|
|
|
@ -36,7 +36,7 @@ int main(const int argc, const char* argv[]) {
|
|||
NonlinearFactorGraph::shared_ptr graph;
|
||||
Values::shared_ptr initial;
|
||||
bool is3D = true;
|
||||
boost::tie(graph, initial) = readG2o(g2oFile, is3D);
|
||||
std::tie(graph, initial) = readG2o(g2oFile, is3D);
|
||||
|
||||
// Add prior on the first key
|
||||
auto priorModel = noiseModel::Diagonal::Variances(
|
||||
|
@ -60,7 +60,7 @@ int main(const int argc, const char* argv[]) {
|
|||
std::cout << "Writing results to file: " << outputFile << std::endl;
|
||||
NonlinearFactorGraph::shared_ptr graphNoKernel;
|
||||
Values::shared_ptr initial2;
|
||||
boost::tie(graphNoKernel, initial2) = readG2o(g2oFile);
|
||||
std::tie(graphNoKernel, initial2) = readG2o(g2oFile);
|
||||
writeG2o(*graphNoKernel, initialization, outputFile);
|
||||
std::cout << "done! " << std::endl;
|
||||
}
|
||||
|
|
|
@ -36,7 +36,7 @@ int main(const int argc, const char* argv[]) {
|
|||
NonlinearFactorGraph::shared_ptr graph;
|
||||
Values::shared_ptr initial;
|
||||
bool is3D = true;
|
||||
boost::tie(graph, initial) = readG2o(g2oFile, is3D);
|
||||
std::tie(graph, initial) = readG2o(g2oFile, is3D);
|
||||
|
||||
// Add prior on the first key
|
||||
auto priorModel = noiseModel::Diagonal::Variances(
|
||||
|
@ -66,7 +66,7 @@ int main(const int argc, const char* argv[]) {
|
|||
std::cout << "Writing results to file: " << outputFile << std::endl;
|
||||
NonlinearFactorGraph::shared_ptr graphNoKernel;
|
||||
Values::shared_ptr initial2;
|
||||
boost::tie(graphNoKernel, initial2) = readG2o(g2oFile);
|
||||
std::tie(graphNoKernel, initial2) = readG2o(g2oFile);
|
||||
writeG2o(*graphNoKernel, initialization, outputFile);
|
||||
std::cout << "done! " << std::endl;
|
||||
}
|
||||
|
|
|
@ -92,7 +92,7 @@ std::list<TimedOdometry> readOdometry() {
|
|||
|
||||
// load the ranges from TD
|
||||
// Time (sec) Sender / Antenna ID Receiver Node ID Range (m)
|
||||
using RangeTriple = boost::tuple<double, size_t, double>;
|
||||
using RangeTriple = std::tuple<double, size_t, double>;
|
||||
std::vector<RangeTriple> readTriples() {
|
||||
std::vector<RangeTriple> triples;
|
||||
std::string data_file = gtsam::findExampleDataFile("Plaza2_TD.txt");
|
||||
|
@ -166,7 +166,7 @@ int main(int argc, char** argv) {
|
|||
//--------------------------------- odometry loop --------------------------
|
||||
double t;
|
||||
Pose2 odometry;
|
||||
boost::tie(t, odometry) = timedOdometry;
|
||||
std::tie(t, odometry) = timedOdometry;
|
||||
|
||||
// add odometry factor
|
||||
newFactors.emplace_shared<gtsam::BetweenFactor<Pose2>>(i - 1, i, odometry,
|
||||
|
@ -178,10 +178,10 @@ int main(int argc, char** argv) {
|
|||
initial.insert(i, predictedPose);
|
||||
|
||||
// Check if there are range factors to be added
|
||||
while (k < K && t >= boost::get<0>(triples[k])) {
|
||||
size_t j = boost::get<1>(triples[k]);
|
||||
while (k < K && t >= std::get<0>(triples[k])) {
|
||||
size_t j = std::get<1>(triples[k]);
|
||||
Symbol landmark_key('L', j);
|
||||
double range = boost::get<2>(triples[k]);
|
||||
double range = std::get<2>(triples[k]);
|
||||
newFactors.emplace_shared<gtsam::RangeFactor<Pose2, Point2>>(
|
||||
i, landmark_key, range, rangeNoise);
|
||||
if (initializedLandmarks.count(landmark_key) == 0) {
|
||||
|
|
|
@ -103,7 +103,7 @@ int main(int argc, char* argv[]) {
|
|||
auto result = shonan.run(initial, pMin);
|
||||
|
||||
// Parse file again to set up translation problem, adding a prior
|
||||
boost::tie(inputGraph, posesInFile) = load2D(inputFile);
|
||||
std::tie(inputGraph, posesInFile) = load2D(inputFile);
|
||||
auto priorModel = noiseModel::Unit::Create(3);
|
||||
inputGraph->addPrior(0, posesInFile->at<Pose2>(0), priorModel);
|
||||
|
||||
|
@ -119,7 +119,7 @@ int main(int argc, char* argv[]) {
|
|||
auto result = shonan.run(initial, pMin);
|
||||
|
||||
// Parse file again to set up translation problem, adding a prior
|
||||
boost::tie(inputGraph, posesInFile) = load3D(inputFile);
|
||||
std::tie(inputGraph, posesInFile) = load3D(inputFile);
|
||||
auto priorModel = noiseModel::Unit::Create(6);
|
||||
inputGraph->addPrior(0, posesInFile->at<Pose3>(0), priorModel);
|
||||
|
||||
|
|
|
@ -49,8 +49,6 @@
|
|||
#include <boost/archive/binary_oarchive.hpp>
|
||||
#include <boost/program_options.hpp>
|
||||
#include <boost/range/algorithm/set_algorithm.hpp>
|
||||
#include <boost/range/adaptor/reversed.hpp>
|
||||
#include <boost/serialization/export.hpp>
|
||||
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
|
|
|
@ -23,7 +23,6 @@
|
|||
#include <Eigen/SVD>
|
||||
#include <Eigen/LU>
|
||||
|
||||
#include <boost/tuple/tuple.hpp>
|
||||
#include <boost/tokenizer.hpp>
|
||||
#include <boost/format.hpp>
|
||||
|
||||
|
@ -251,7 +250,7 @@ pair<Matrix,Matrix> qr(const Matrix& A) {
|
|||
|
||||
// calculate the Householder vector v
|
||||
double beta; Vector vjm;
|
||||
boost::tie(beta,vjm) = house(xjm);
|
||||
std::tie(beta,vjm) = house(xjm);
|
||||
|
||||
// pad with zeros to get m-dimensional vector v
|
||||
for(size_t k = 0 ; k < m; k++)
|
||||
|
@ -269,13 +268,13 @@ pair<Matrix,Matrix> qr(const Matrix& A) {
|
|||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
list<boost::tuple<Vector, double, double> >
|
||||
list<std::tuple<Vector, double, double> >
|
||||
weighted_eliminate(Matrix& A, Vector& b, const Vector& sigmas) {
|
||||
size_t m = A.rows(), n = A.cols(); // get size(A)
|
||||
size_t maxRank = min(m,n);
|
||||
|
||||
// create list
|
||||
list<boost::tuple<Vector, double, double> > results;
|
||||
list<std::tuple<Vector, double, double> > results;
|
||||
|
||||
Vector pseudo(m); // allocate storage for pseudo-inverse
|
||||
Vector weights = sigmas.array().square().inverse(); // calculate weights once
|
||||
|
@ -304,7 +303,7 @@ weighted_eliminate(Matrix& A, Vector& b, const Vector& sigmas) {
|
|||
|
||||
// construct solution (r, d, sigma)
|
||||
// TODO: avoid sqrt, store precision or at least variance
|
||||
results.push_back(boost::make_tuple(r, d, 1./sqrt(precision)));
|
||||
results.push_back(std::make_tuple(r, d, 1./sqrt(precision)));
|
||||
|
||||
// exit after rank exhausted
|
||||
if (results.size()>=maxRank) break;
|
||||
|
@ -565,7 +564,7 @@ void svd(const Matrix& A, Matrix& U, Vector& S, Matrix& V) {
|
|||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
boost::tuple<int, double, Vector> DLT(const Matrix& A, double rank_tol) {
|
||||
std::tuple<int, double, Vector> DLT(const Matrix& A, double rank_tol) {
|
||||
|
||||
// Check size of A
|
||||
size_t n = A.rows(), p = A.cols(), m = min(n,p);
|
||||
|
@ -582,7 +581,7 @@ boost::tuple<int, double, Vector> DLT(const Matrix& A, double rank_tol) {
|
|||
|
||||
// Return rank, error, and corresponding column of V
|
||||
double error = m<p ? 0 : s(m-1);
|
||||
return boost::tuple<int, double, Vector>((int)rank, error, Vector(column(V, p-1)));
|
||||
return std::tuple<int, double, Vector>((int)rank, error, Vector(column(V, p-1)));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
|
@ -26,7 +26,6 @@
|
|||
|
||||
#include <gtsam/base/OptionalJacobian.h>
|
||||
#include <gtsam/base/Vector.h>
|
||||
#include <boost/tuple/tuple.hpp>
|
||||
|
||||
#include <vector>
|
||||
|
||||
|
@ -307,7 +306,7 @@ GTSAM_EXPORT void inplace_QR(Matrix& A);
|
|||
* @param sigmas is a vector of the measurement standard deviation
|
||||
* @return list of r vectors, d and sigma
|
||||
*/
|
||||
GTSAM_EXPORT std::list<boost::tuple<Vector, double, double> >
|
||||
GTSAM_EXPORT std::list<std::tuple<Vector, double, double> >
|
||||
weighted_eliminate(Matrix& A, Vector& b, const Vector& sigmas);
|
||||
|
||||
/**
|
||||
|
@ -434,7 +433,7 @@ GTSAM_EXPORT void svd(const Matrix& A, Matrix& U, Vector& S, Matrix& V);
|
|||
* Returns rank of A, minimum error (singular value),
|
||||
* and corresponding eigenvector (column of V, with A=U*S*V')
|
||||
*/
|
||||
GTSAM_EXPORT boost::tuple<int, double, Vector>
|
||||
GTSAM_EXPORT std::tuple<int, double, Vector>
|
||||
DLT(const Matrix& A, double rank_tol = 1e-9);
|
||||
|
||||
/**
|
||||
|
|
|
@ -20,7 +20,6 @@
|
|||
#include <gtsam/base/VectorSpace.h>
|
||||
#include <gtsam/base/testLie.h>
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
#include <boost/tuple/tuple.hpp>
|
||||
#include <iostream>
|
||||
#include <sstream>
|
||||
#include <optional>
|
||||
|
@ -859,7 +858,7 @@ TEST(Matrix, qr )
|
|||
7.4536, 0, 00, 0, 0, 10.9545, 00, 0, 0, 0, 00, 0, 0, 0).finished();
|
||||
|
||||
Matrix Q, R;
|
||||
boost::tie(Q, R) = qr(A);
|
||||
std::tie(Q, R) = qr(A);
|
||||
EXPECT(assert_equal(expectedQ, Q, 1e-4));
|
||||
EXPECT(assert_equal(expectedR, R, 1e-4));
|
||||
EXPECT(assert_equal(A, Q*R, 1e-14));
|
||||
|
@ -911,7 +910,7 @@ TEST(Matrix, weighted_elimination )
|
|||
// perform elimination
|
||||
Matrix A1 = A;
|
||||
Vector b1 = b;
|
||||
std::list<boost::tuple<Vector, double, double> > solution =
|
||||
std::list<std::tuple<Vector, double, double> > solution =
|
||||
weighted_eliminate(A1, b1, sigmas);
|
||||
|
||||
// unpack and verify
|
||||
|
@ -919,7 +918,7 @@ TEST(Matrix, weighted_elimination )
|
|||
for (const auto& tuple : solution) {
|
||||
Vector r;
|
||||
double di, sigma;
|
||||
boost::tie(r, di, sigma) = tuple;
|
||||
std::tie(r, di, sigma) = tuple;
|
||||
EXPECT(assert_equal(r, expectedR.row(i))); // verify r
|
||||
DOUBLES_EQUAL(d(i), di, 1e-8); // verify d
|
||||
DOUBLES_EQUAL(newSigmas(i), sigma, 1e-5); // verify sigma
|
||||
|
@ -1146,7 +1145,7 @@ TEST(Matrix, DLT )
|
|||
int rank;
|
||||
double error;
|
||||
Vector actual;
|
||||
boost::tie(rank,error,actual) = DLT(A);
|
||||
std::tie(rank,error,actual) = DLT(A);
|
||||
Vector expected = (Vector(9) << -0.0, 0.2357, 0.4714, -0.2357, 0.0, - 0.4714,-0.4714, 0.4714, 0.0).finished();
|
||||
EXPECT_LONGS_EQUAL(8,rank);
|
||||
EXPECT_DOUBLES_EQUAL(0,error,1e-8);
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
#include <gtsam/base/VectorSpace.h>
|
||||
#include <gtsam/base/testLie.h>
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
#include <boost/tuple/tuple.hpp>
|
||||
#include <iostream>
|
||||
|
||||
using namespace std;
|
||||
|
@ -156,7 +155,7 @@ TEST(Vector, weightedPseudoinverse )
|
|||
|
||||
// perform solve
|
||||
Vector actual; double precision;
|
||||
boost::tie(actual, precision) = weightedPseudoinverse(x, weights);
|
||||
std::tie(actual, precision) = weightedPseudoinverse(x, weights);
|
||||
|
||||
// construct expected
|
||||
Vector expected(2);
|
||||
|
@ -181,7 +180,7 @@ TEST(Vector, weightedPseudoinverse_constraint )
|
|||
Vector weights = sigmas.array().square().inverse();
|
||||
// perform solve
|
||||
Vector actual; double precision;
|
||||
boost::tie(actual, precision) = weightedPseudoinverse(x, weights);
|
||||
std::tie(actual, precision) = weightedPseudoinverse(x, weights);
|
||||
|
||||
// construct expected
|
||||
Vector expected(2);
|
||||
|
@ -199,7 +198,7 @@ TEST(Vector, weightedPseudoinverse_nan )
|
|||
Vector sigmas = (Vector(4) << 0.1, 0.1, 0., 0.).finished();
|
||||
Vector weights = sigmas.array().square().inverse();
|
||||
Vector pseudo; double precision;
|
||||
boost::tie(pseudo, precision) = weightedPseudoinverse(a, weights);
|
||||
std::tie(pseudo, precision) = weightedPseudoinverse(a, weights);
|
||||
|
||||
Vector expected = (Vector(4) << 1., 0., 0.,0.).finished();
|
||||
EXPECT(assert_equal(expected, pseudo));
|
||||
|
|
|
@ -268,5 +268,4 @@ namespace gtsam {
|
|||
// traits
|
||||
template <>
|
||||
struct traits<DecisionTreeFactor> : public Testable<DecisionTreeFactor> {};
|
||||
|
||||
} // namespace gtsam
|
||||
|
|
|
@ -20,8 +20,6 @@
|
|||
#include <gtsam/discrete/DiscreteConditional.h>
|
||||
#include <gtsam/inference/FactorGraph-inst.h>
|
||||
|
||||
#include <boost/range/adaptor/reversed.hpp>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
// Instantiate base class
|
||||
|
@ -58,8 +56,9 @@ DiscreteValues DiscreteBayesNet::sample() const {
|
|||
|
||||
DiscreteValues DiscreteBayesNet::sample(DiscreteValues result) const {
|
||||
// sample each node in turn in topological sort order (parents first)
|
||||
for (auto conditional : boost::adaptors::reverse(*this))
|
||||
conditional->sampleInPlace(&result);
|
||||
for (auto it = std::make_reverse_iterator(end()); it != std::make_reverse_iterator(begin()); ++it) {
|
||||
(*it)->sampleInPlace(&result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
#include <gtsam/discrete/DiscreteLookupDAG.h>
|
||||
#include <gtsam/discrete/DiscreteValues.h>
|
||||
|
||||
#include <iterator>
|
||||
#include <string>
|
||||
#include <utility>
|
||||
|
||||
|
@ -118,8 +119,10 @@ DiscreteLookupDAG DiscreteLookupDAG::FromBayesNet(
|
|||
|
||||
DiscreteValues DiscreteLookupDAG::argmax(DiscreteValues result) const {
|
||||
// Argmax each node in turn in topological sort order (parents first).
|
||||
for (auto lookupTable : boost::adaptors::reverse(*this))
|
||||
lookupTable->argmaxInPlace(&result);
|
||||
for (auto it = std::make_reverse_iterator(end()); it != std::make_reverse_iterator(begin()); ++it) {
|
||||
// dereference to get the sharedFactor to the lookup table
|
||||
(*it)->argmaxInPlace(&result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
/* ************************************************************************** */
|
||||
|
|
|
@ -17,7 +17,6 @@
|
|||
|
||||
#include <gtsam/discrete/DiscreteValues.h>
|
||||
|
||||
#include <boost/range/combine.hpp>
|
||||
#include <sstream>
|
||||
|
||||
using std::cout;
|
||||
|
@ -39,8 +38,10 @@ void DiscreteValues::print(const string& s,
|
|||
/* ************************************************************************ */
|
||||
bool DiscreteValues::equals(const DiscreteValues& x, double tol) const {
|
||||
if (this->size() != x.size()) return false;
|
||||
for (const auto values : boost::combine(*this, x)) {
|
||||
if (values.get<0>() != values.get<1>()) return false;
|
||||
auto it1 = x.begin();
|
||||
auto it2 = this->begin();
|
||||
for (; it1 != x.end(); ++it1, ++it2) {
|
||||
if (it1->first != it2->first || it1->second != it2->second) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
|
|
@ -111,7 +111,7 @@ TEST(DiscreteFactorGraph, test) {
|
|||
frontalKeys += Key(0);
|
||||
DiscreteConditional::shared_ptr conditional;
|
||||
DecisionTreeFactor::shared_ptr newFactor;
|
||||
boost::tie(conditional, newFactor) = EliminateDiscrete(graph, frontalKeys);
|
||||
std::tie(conditional, newFactor) = EliminateDiscrete(graph, frontalKeys);
|
||||
|
||||
// Check Conditional
|
||||
CHECK(conditional);
|
||||
|
@ -130,7 +130,7 @@ TEST(DiscreteFactorGraph, test) {
|
|||
DiscreteEliminationTree etree(graph, ordering);
|
||||
DiscreteBayesNet::shared_ptr actual;
|
||||
DiscreteFactorGraph::shared_ptr remainingGraph;
|
||||
boost::tie(actual, remainingGraph) = etree.eliminate(&EliminateDiscrete);
|
||||
std::tie(actual, remainingGraph) = etree.eliminate(&EliminateDiscrete);
|
||||
|
||||
// Check Bayes net
|
||||
DiscreteBayesNet expectedBayesNet;
|
||||
|
|
|
@ -170,13 +170,13 @@ Vector3 Rot3::xyz(OptionalJacobian<3, 3> H) const {
|
|||
#endif
|
||||
|
||||
Matrix39 qHm;
|
||||
boost::tie(I, q) = RQ(m, qHm);
|
||||
std::tie(I, q) = RQ(m, qHm);
|
||||
|
||||
// TODO : Explore whether this expression can be optimized as both
|
||||
// qHm and mH are super-sparse
|
||||
*H = qHm * mH;
|
||||
} else
|
||||
boost::tie(I, q) = RQ(matrix());
|
||||
std::tie(I, q) = RQ(matrix());
|
||||
return q;
|
||||
}
|
||||
|
||||
|
|
|
@ -510,7 +510,7 @@ TEST( Rot3, RQ)
|
|||
// Try RQ on a pure rotation
|
||||
Matrix actualK;
|
||||
Vector actual;
|
||||
boost::tie(actualK, actual) = RQ(R.matrix());
|
||||
std::tie(actualK, actual) = RQ(R.matrix());
|
||||
Vector expected = Vector3(0.14715, 0.385821, 0.231671);
|
||||
CHECK(assert_equal(I_3x3,actualK));
|
||||
CHECK(assert_equal(expected,actual,1e-6));
|
||||
|
@ -531,7 +531,7 @@ TEST( Rot3, RQ)
|
|||
// Try RQ to recover calibration from 3*3 sub-block of projection matrix
|
||||
Matrix K = (Matrix(3, 3) << 500.0, 0.0, 320.0, 0.0, 500.0, 240.0, 0.0, 0.0, 1.0).finished();
|
||||
Matrix A = K * R.matrix();
|
||||
boost::tie(actualK, actual) = RQ(A);
|
||||
std::tie(actualK, actual) = RQ(A);
|
||||
CHECK(assert_equal(K,actualK));
|
||||
CHECK(assert_equal(expected,actual,1e-6));
|
||||
}
|
||||
|
|
|
@ -51,7 +51,7 @@ Vector4 triangulateHomogeneousDLT(
|
|||
int rank;
|
||||
double error;
|
||||
Vector v;
|
||||
boost::tie(rank, error, v) = DLT(A, rank_tol);
|
||||
std::tie(rank, error, v) = DLT(A, rank_tol);
|
||||
|
||||
if (rank < 3) throw(TriangulationUnderconstrainedException());
|
||||
|
||||
|
@ -82,7 +82,7 @@ Vector4 triangulateHomogeneousDLT(
|
|||
int rank;
|
||||
double error;
|
||||
Vector v;
|
||||
boost::tie(rank, error, v) = DLT(A, rank_tol);
|
||||
std::tie(rank, error, v) = DLT(A, rank_tol);
|
||||
|
||||
if (rank < 3) throw(TriangulationUnderconstrainedException());
|
||||
|
||||
|
|
|
@ -195,7 +195,7 @@ Point3 triangulateNonlinear(const std::vector<Pose3>& poses,
|
|||
// Create a factor graph and initial values
|
||||
Values values;
|
||||
NonlinearFactorGraph graph;
|
||||
boost::tie(graph, values) = triangulationGraph<CALIBRATION> //
|
||||
std::tie(graph, values) = triangulationGraph<CALIBRATION> //
|
||||
(poses, sharedCal, measurements, Symbol('p', 0), initialEstimate, model);
|
||||
|
||||
return optimize(graph, values, Symbol('p', 0));
|
||||
|
@ -217,7 +217,7 @@ Point3 triangulateNonlinear(
|
|||
// Create a factor graph and initial values
|
||||
Values values;
|
||||
NonlinearFactorGraph graph;
|
||||
boost::tie(graph, values) = triangulationGraph<CAMERA> //
|
||||
std::tie(graph, values) = triangulationGraph<CAMERA> //
|
||||
(cameras, measurements, Symbol('p', 0), initialEstimate, model);
|
||||
|
||||
return optimize(graph, values, Symbol('p', 0));
|
||||
|
|
|
@ -102,7 +102,7 @@ struct HybridConstructorTraversalData {
|
|||
keyAsOrdering.push_back(node->key);
|
||||
SymbolicConditional::shared_ptr conditional;
|
||||
SymbolicFactor::shared_ptr separatorFactor;
|
||||
boost::tie(conditional, separatorFactor) =
|
||||
std::tie(conditional, separatorFactor) =
|
||||
internal::EliminateSymbolic(symbolicFactors, keyAsOrdering);
|
||||
|
||||
// Store symbolic elimination results in the parent
|
||||
|
@ -129,9 +129,9 @@ struct HybridConstructorTraversalData {
|
|||
// Check if we should merge the i^th child
|
||||
if (nrParents + nrFrontals == childConditionals[i]->nrParents()) {
|
||||
const bool myType =
|
||||
data.discreteKeys.exists(conditional->frontals()[0]);
|
||||
data.discreteKeys.exists(conditional->frontals().front());
|
||||
const bool theirType =
|
||||
data.discreteKeys.exists(childConditionals[i]->frontals()[0]);
|
||||
data.discreteKeys.exists(childConditionals[i]->frontals().front());
|
||||
|
||||
if (myType == theirType) {
|
||||
// Increment number of frontal variables
|
||||
|
|
|
@ -21,7 +21,6 @@
|
|||
#include <gtsam/inference/BayesNet.h>
|
||||
#include <gtsam/inference/FactorGraph-inst.h>
|
||||
|
||||
#include <boost/range/adaptor/reversed.hpp>
|
||||
#include <fstream>
|
||||
#include <string>
|
||||
|
||||
|
@ -56,7 +55,9 @@ void BayesNet<CONDITIONAL>::dot(std::ostream& os,
|
|||
os << "\n";
|
||||
|
||||
// Reverse order as typically Bayes nets stored in reverse topological sort.
|
||||
for (auto conditional : boost::adaptors::reverse(*this)) {
|
||||
for (auto it = std::make_reverse_iterator(this->end());
|
||||
it != std::make_reverse_iterator(this->begin()); ++it) {
|
||||
const auto& conditional = *it;
|
||||
auto frontals = conditional->frontals();
|
||||
const Key me = frontals.front();
|
||||
auto parents = conditional->parents();
|
||||
|
|
|
@ -361,7 +361,7 @@ namespace gtsam {
|
|||
}
|
||||
// Factor into C1\B | B.
|
||||
sharedFactorGraph temp_remaining;
|
||||
boost::tie(p_C1_B, temp_remaining) =
|
||||
std::tie(p_C1_B, temp_remaining) =
|
||||
FactorGraphType(p_C1_Bred).eliminatePartialMultifrontal(Ordering(C1_minus_B), function);
|
||||
}
|
||||
std::shared_ptr<typename EliminationTraitsType::BayesTreeType> p_C2_B; {
|
||||
|
@ -373,7 +373,7 @@ namespace gtsam {
|
|||
}
|
||||
// Factor into C2\B | B.
|
||||
sharedFactorGraph temp_remaining;
|
||||
boost::tie(p_C2_B, temp_remaining) =
|
||||
std::tie(p_C2_B, temp_remaining) =
|
||||
FactorGraphType(p_C2_Bred).eliminatePartialMultifrontal(Ordering(C2_minus_B), function);
|
||||
}
|
||||
gttoc(Full_root_factoring);
|
||||
|
|
|
@ -70,12 +70,33 @@ namespace gtsam {
|
|||
/// Typedef to this class
|
||||
typedef Conditional<FACTOR,DERIVEDCONDITIONAL> This;
|
||||
|
||||
|
||||
public:
|
||||
/** A mini implementation of an iterator range, to share const
|
||||
* views of frontals and parents. */
|
||||
typedef std::pair<typename FACTOR::const_iterator, typename FACTOR::const_iterator> ConstFactorRange;
|
||||
struct ConstFactorRangeIterator {
|
||||
ConstFactorRange range_;
|
||||
// Delete default constructor
|
||||
ConstFactorRangeIterator() = delete;
|
||||
ConstFactorRangeIterator(ConstFactorRange const& x) : range_(x) {}
|
||||
// Implement begin and end for iteration
|
||||
typename FACTOR::const_iterator begin() const { return range_.first; }
|
||||
typename FACTOR::const_iterator end() const { return range_.second; }
|
||||
size_t size() const { return std::distance(range_.first, range_.second); }
|
||||
const auto& front() const { return *begin(); }
|
||||
// == operator overload for comparison with another iterator
|
||||
template<class OTHER>
|
||||
bool operator==(const OTHER& rhs) const {
|
||||
return std::equal(begin(), end(), rhs.begin());
|
||||
}
|
||||
};
|
||||
|
||||
/** View of the frontal keys (call frontals()) */
|
||||
typedef boost::iterator_range<typename FACTOR::const_iterator> Frontals;
|
||||
typedef ConstFactorRangeIterator Frontals;
|
||||
|
||||
/** View of the separator keys (call parents()) */
|
||||
typedef boost::iterator_range<typename FACTOR::const_iterator> Parents;
|
||||
typedef ConstFactorRangeIterator Parents;
|
||||
|
||||
protected:
|
||||
/// @name Standard Constructors
|
||||
|
@ -121,10 +142,10 @@ namespace gtsam {
|
|||
}
|
||||
|
||||
/** return a view of the frontal keys */
|
||||
Frontals frontals() const { return boost::make_iterator_range(beginFrontals(), endFrontals()); }
|
||||
Frontals frontals() const { return ConstFactorRangeIterator({beginFrontals(), endFrontals()});}
|
||||
|
||||
/** return a view of the parent keys */
|
||||
Parents parents() const { return boost::make_iterator_range(beginParents(), endParents()); }
|
||||
Parents parents() const { return ConstFactorRangeIterator({beginParents(), endParents()}); }
|
||||
|
||||
/**
|
||||
* All conditional types need to implement a `logProbability` function, for which
|
||||
|
|
|
@ -20,7 +20,6 @@
|
|||
|
||||
#include <gtsam/inference/EliminateableFactorGraph.h>
|
||||
#include <gtsam/inference/inferenceExceptions.h>
|
||||
#include <boost/tuple/tuple.hpp>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
@ -75,7 +74,7 @@ namespace gtsam {
|
|||
EliminationTreeType etree(asDerived(), (*variableIndex).get(), ordering);
|
||||
std::shared_ptr<BayesNetType> bayesNet;
|
||||
std::shared_ptr<FactorGraphType> factorGraph;
|
||||
boost::tie(bayesNet,factorGraph) = etree.eliminate(function);
|
||||
std::tie(bayesNet,factorGraph) = etree.eliminate(function);
|
||||
// If any factors are remaining, the ordering was incomplete
|
||||
if(!factorGraph->empty())
|
||||
throw InconsistentEliminationRequested();
|
||||
|
@ -139,7 +138,7 @@ namespace gtsam {
|
|||
JunctionTreeType junctionTree(etree);
|
||||
std::shared_ptr<BayesTreeType> bayesTree;
|
||||
std::shared_ptr<FactorGraphType> factorGraph;
|
||||
boost::tie(bayesTree,factorGraph) = junctionTree.eliminate(function);
|
||||
std::tie(bayesTree,factorGraph) = junctionTree.eliminate(function);
|
||||
// If any factors are remaining, the ordering was incomplete
|
||||
if(!factorGraph->empty())
|
||||
throw InconsistentEliminationRequested();
|
||||
|
@ -277,7 +276,7 @@ namespace gtsam {
|
|||
// in the order requested.
|
||||
std::shared_ptr<BayesTreeType> bayesTree;
|
||||
std::shared_ptr<FactorGraphType> factorGraph;
|
||||
boost::tie(bayesTree,factorGraph) =
|
||||
std::tie(bayesTree,factorGraph) =
|
||||
eliminatePartialMultifrontal(marginalizedVariableOrdering, function, variableIndex);
|
||||
|
||||
if(const Ordering* varsAsOrdering = boost::get<const Ordering&>(&variables))
|
||||
|
@ -344,7 +343,7 @@ namespace gtsam {
|
|||
// in the order requested.
|
||||
std::shared_ptr<BayesTreeType> bayesTree;
|
||||
std::shared_ptr<FactorGraphType> factorGraph;
|
||||
boost::tie(bayesTree,factorGraph) =
|
||||
std::tie(bayesTree,factorGraph) =
|
||||
eliminatePartialMultifrontal(marginalizedVariableOrdering, function, variableIndex);
|
||||
|
||||
if(const Ordering* varsAsOrdering = boost::get<const Ordering&>(&variables))
|
||||
|
|
|
@ -85,7 +85,7 @@ struct ConstructorTraversalData {
|
|||
keyAsOrdering.push_back(ETreeNode->key);
|
||||
SymbolicConditional::shared_ptr myConditional;
|
||||
SymbolicFactor::shared_ptr mySeparatorFactor;
|
||||
boost::tie(myConditional, mySeparatorFactor) = internal::EliminateSymbolic(
|
||||
std::tie(myConditional, mySeparatorFactor) = internal::EliminateSymbolic(
|
||||
symbolicFactors, keyAsOrdering);
|
||||
|
||||
// Store symbolic elimination results in the parent
|
||||
|
|
|
@ -24,7 +24,6 @@
|
|||
#include <gtsam/base/timing.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
|
||||
#include <boost/tuple/tuple.hpp>
|
||||
|
||||
#include <iostream>
|
||||
#include <string>
|
||||
|
@ -111,7 +110,7 @@ VariableSlots::VariableSlots(const FG& factorGraph)
|
|||
// the array entry for each factor that will indicate the factor
|
||||
// does not involve the variable.
|
||||
iterator thisVarSlots; bool inserted;
|
||||
boost::tie(thisVarSlots, inserted) = this->insert(std::make_pair(involvedVariable, FastVector<size_t>()));
|
||||
std::tie(thisVarSlots, inserted) = this->insert(std::make_pair(involvedVariable, FastVector<size_t>()));
|
||||
if(inserted)
|
||||
thisVarSlots->second.resize(factorGraph.nrFactors(), Empty);
|
||||
thisVarSlots->second[jointFactorPos] = factorVarSlot;
|
||||
|
|
|
@ -54,7 +54,7 @@ std::list<KEY> predecessorMap2Keys(const PredecessorMap<KEY>& p_map) {
|
|||
SGraph<KEY> g;
|
||||
SVertex root;
|
||||
std::map<KEY, SVertex> key2vertex;
|
||||
boost::tie(g, root, key2vertex) = gtsam::predecessorMap2Graph<SGraph<KEY>, SVertex, KEY>(p_map);
|
||||
std::tie(g, root, key2vertex) = gtsam::predecessorMap2Graph<SGraph<KEY>, SVertex, KEY>(p_map);
|
||||
|
||||
// breadth first visit on the graph
|
||||
std::list<KEY> keys;
|
||||
|
@ -114,7 +114,7 @@ SDGraph<KEY> toBoostGraph(const G& graph) {
|
|||
|
||||
/* ************************************************************************* */
|
||||
template<class G, class V, class KEY>
|
||||
boost::tuple<G, V, std::map<KEY,V> >
|
||||
std::tuple<G, V, std::map<KEY,V> >
|
||||
predecessorMap2Graph(const PredecessorMap<KEY>& p_map) {
|
||||
|
||||
G g;
|
||||
|
@ -146,7 +146,7 @@ predecessorMap2Graph(const PredecessorMap<KEY>& p_map) {
|
|||
if (!foundRoot)
|
||||
throw std::invalid_argument("predecessorMap2Graph: invalid predecessor map!");
|
||||
else
|
||||
return boost::tuple<G, V, std::map<KEY, V> >(g, root, key2vertex);
|
||||
return std::tuple<G, V, std::map<KEY, V> >(g, root, key2vertex);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
@ -185,7 +185,7 @@ std::shared_ptr<Values> composePoses(const G& graph, const PredecessorMap<KEY>&
|
|||
PoseGraph g;
|
||||
PoseVertex root;
|
||||
std::map<KEY, PoseVertex> key2vertex;
|
||||
boost::tie(g, root, key2vertex) =
|
||||
std::tie(g, root, key2vertex) =
|
||||
predecessorMap2Graph<PoseGraph, PoseVertex, KEY>(tree);
|
||||
|
||||
// attach the relative poses to the edges
|
||||
|
@ -207,8 +207,8 @@ std::shared_ptr<Values> composePoses(const G& graph, const PredecessorMap<KEY>&
|
|||
PoseVertex v2 = key2vertex.find(key2)->second;
|
||||
|
||||
POSE l1Xl2 = factor->measured();
|
||||
boost::tie(edge12, found1) = boost::edge(v1, v2, g);
|
||||
boost::tie(edge21, found2) = boost::edge(v2, v1, g);
|
||||
std::tie(edge12, found1) = boost::edge(v1, v2, g);
|
||||
std::tie(edge21, found2) = boost::edge(v2, v1, g);
|
||||
if (found1 && found2) throw std::invalid_argument ("composePoses: invalid spanning tree");
|
||||
if (!found1 && !found2) continue;
|
||||
if (found1)
|
||||
|
|
|
@ -83,7 +83,7 @@ namespace gtsam {
|
|||
* V = Vertex type
|
||||
*/
|
||||
template<class G, class V, class KEY>
|
||||
boost::tuple<G, V, std::map<KEY,V> > predecessorMap2Graph(const PredecessorMap<KEY>& p_map);
|
||||
std::tuple<G, V, std::map<KEY,V> > predecessorMap2Graph(const PredecessorMap<KEY>& p_map);
|
||||
|
||||
/**
|
||||
* Compose the poses by following the chain specified by the spanning tree
|
||||
|
|
|
@ -17,7 +17,6 @@
|
|||
* @author Christian Potthast
|
||||
*/
|
||||
|
||||
#include <boost/range/adaptor/map.hpp>
|
||||
#include <gtsam/linear/Errors.h>
|
||||
#include <gtsam/linear/VectorValues.h>
|
||||
|
||||
|
@ -28,7 +27,7 @@ namespace gtsam {
|
|||
/* ************************************************************************* */
|
||||
Errors createErrors(const VectorValues& V) {
|
||||
Errors result;
|
||||
for (const Vector& e : V | boost::adaptors::map_values) {
|
||||
for (const auto& [key, e] : V) {
|
||||
result.push_back(e);
|
||||
}
|
||||
return result;
|
||||
|
|
|
@ -20,8 +20,8 @@
|
|||
#include <gtsam/linear/GaussianBayesNet.h>
|
||||
#include <gtsam/linear/GaussianFactorGraph.h>
|
||||
|
||||
#include <boost/range/adaptor/reversed.hpp>
|
||||
#include <fstream>
|
||||
#include <iterator>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
|
@ -50,11 +50,11 @@ namespace gtsam {
|
|||
VectorValues solution = given;
|
||||
// (R*x)./sigmas = y by solving x=inv(R)*(y.*sigmas)
|
||||
// solve each node in reverse topological sort order (parents first)
|
||||
for (auto cg : boost::adaptors::reverse(*this)) {
|
||||
for (auto it = std::make_reverse_iterator(end()); it != std::make_reverse_iterator(begin()); ++it) {
|
||||
// i^th part of R*x=y, x=inv(R)*y
|
||||
// (Rii*xi + R_i*x(i+1:))./si = yi =>
|
||||
// xi = inv(Rii)*(yi.*si - R_i*x(i+1:))
|
||||
solution.insert(cg->solve(solution));
|
||||
solution.insert((*it)->solve(solution));
|
||||
}
|
||||
return solution;
|
||||
}
|
||||
|
@ -69,8 +69,8 @@ namespace gtsam {
|
|||
std::mt19937_64* rng) const {
|
||||
VectorValues result(given);
|
||||
// sample each node in reverse topological sort order (parents first)
|
||||
for (auto cg : boost::adaptors::reverse(*this)) {
|
||||
const VectorValues sampled = cg->sample(result, rng);
|
||||
for (auto it = std::make_reverse_iterator(end()); it != std::make_reverse_iterator(begin()); ++it) {
|
||||
const VectorValues sampled = (*it)->sample(result, rng);
|
||||
result.insert(sampled);
|
||||
}
|
||||
return result;
|
||||
|
@ -131,8 +131,8 @@ namespace gtsam {
|
|||
VectorValues result;
|
||||
// TODO this looks pretty sketchy. result is passed as the parents argument
|
||||
// as it's filled up by solving the gaussian conditionals.
|
||||
for (auto cg: boost::adaptors::reverse(*this)) {
|
||||
result.insert(cg->solveOtherRHS(result, rhs));
|
||||
for (auto it = std::make_reverse_iterator(end()); it != std::make_reverse_iterator(begin()); ++it) {
|
||||
result.insert((*it)->solveOtherRHS(result, rhs));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
|
|
@ -30,19 +30,12 @@
|
|||
#include <gtsam/base/timing.h>
|
||||
|
||||
#include <boost/format.hpp>
|
||||
#include <boost/tuple/tuple.hpp>
|
||||
#include <boost/range/adaptor/transformed.hpp>
|
||||
#include <boost/range/adaptor/map.hpp>
|
||||
#include <boost/range/algorithm/copy.hpp>
|
||||
|
||||
#include <sstream>
|
||||
#include <limits>
|
||||
#include "gtsam/base/Vector.h"
|
||||
|
||||
using namespace std;
|
||||
namespace br {
|
||||
using namespace boost::range;
|
||||
using namespace boost::adaptors;
|
||||
}
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
@ -144,12 +137,20 @@ namespace {
|
|||
DenseIndex _getSizeHF(const Vector& m) {
|
||||
return m.size();
|
||||
}
|
||||
|
||||
std::vector<DenseIndex> _getSizeHFVec(const std::vector<Vector>& m) {
|
||||
std::vector<DenseIndex> dims;
|
||||
for (const Vector& v : m) {
|
||||
dims.push_back(v.size());
|
||||
}
|
||||
return dims;
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
HessianFactor::HessianFactor(const KeyVector& js,
|
||||
const std::vector<Matrix>& Gs, const std::vector<Vector>& gs, double f) :
|
||||
GaussianFactor(js), info_(gs | br::transformed(&_getSizeHF), true) {
|
||||
GaussianFactor(js), info_(_getSizeHFVec(gs), true) {
|
||||
// Get the number of variables
|
||||
size_t variable_count = js.size();
|
||||
|
||||
|
@ -417,7 +418,7 @@ void HessianFactor::multiplyHessianAdd(double alpha, const VectorValues& x,
|
|||
for (DenseIndex i = 0; i < (DenseIndex) size(); ++i) {
|
||||
bool didNotExist;
|
||||
VectorValues::iterator it;
|
||||
boost::tie(it, didNotExist) = yvalues.tryInsert(keys_[i], Vector());
|
||||
std::tie(it, didNotExist) = yvalues.tryInsert(keys_[i], Vector());
|
||||
if (didNotExist)
|
||||
it->second = alpha * y[i]; // init
|
||||
else
|
||||
|
|
|
@ -21,7 +21,6 @@
|
|||
#include <gtsam/inference/Ordering.h>
|
||||
#include <gtsam/base/Vector.h>
|
||||
|
||||
#include <boost/tuple/tuple.hpp>
|
||||
#include <memory>
|
||||
|
||||
#include <iosfwd>
|
||||
|
|
|
@ -32,10 +32,6 @@
|
|||
#include <gtsam/base/cholesky.h>
|
||||
|
||||
#include <boost/format.hpp>
|
||||
#include <boost/array.hpp>
|
||||
#include <boost/range/algorithm/copy.hpp>
|
||||
#include <boost/range/adaptor/indirected.hpp>
|
||||
#include <boost/range/adaptor/map.hpp>
|
||||
|
||||
#include <cmath>
|
||||
#include <sstream>
|
||||
|
@ -102,7 +98,7 @@ JacobianFactor::JacobianFactor(const HessianFactor& factor)
|
|||
// Do Cholesky to get a Jacobian
|
||||
size_t maxrank;
|
||||
bool success;
|
||||
boost::tie(maxrank, success) = choleskyCareful(Ab_.matrix());
|
||||
std::tie(maxrank, success) = choleskyCareful(Ab_.matrix());
|
||||
|
||||
// Check that Cholesky succeeded OR it managed to factor the full Hessian.
|
||||
// THe latter case occurs with non-positive definite matrices arising from QP.
|
||||
|
@ -122,7 +118,7 @@ JacobianFactor::JacobianFactor(const HessianFactor& factor)
|
|||
/* ************************************************************************* */
|
||||
// Helper functions for combine constructor
|
||||
namespace {
|
||||
boost::tuple<FastVector<DenseIndex>, DenseIndex, DenseIndex> _countDims(
|
||||
std::tuple<FastVector<DenseIndex>, DenseIndex, DenseIndex> _countDims(
|
||||
const FastVector<JacobianFactor::shared_ptr>& factors,
|
||||
const FastVector<VariableSlots::const_iterator>& variableSlots) {
|
||||
gttic(countDims);
|
||||
|
@ -188,7 +184,7 @@ boost::tuple<FastVector<DenseIndex>, DenseIndex, DenseIndex> _countDims(
|
|||
}
|
||||
#endif
|
||||
|
||||
return boost::make_tuple(varDims, m, n);
|
||||
return std::make_tuple(varDims, m, n);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
@ -221,16 +217,16 @@ void JacobianFactor::JacobianFactorHelper(const GaussianFactorGraph& graph,
|
|||
// Count dimensions
|
||||
FastVector<DenseIndex> varDims;
|
||||
DenseIndex m, n;
|
||||
boost::tie(varDims, m, n) = _countDims(jacobians, orderedSlots);
|
||||
std::tie(varDims, m, n) = _countDims(jacobians, orderedSlots);
|
||||
|
||||
// Allocate matrix and copy keys in order
|
||||
gttic(allocate);
|
||||
Ab_ = VerticalBlockMatrix(varDims, m, true); // Allocate augmented matrix
|
||||
Base::keys_.resize(orderedSlots.size());
|
||||
boost::range::copy(
|
||||
// Get variable keys
|
||||
orderedSlots | boost::adaptors::indirected | boost::adaptors::map_keys,
|
||||
Base::keys_.begin());
|
||||
// Copy keys in order
|
||||
std::transform(orderedSlots.begin(), orderedSlots.end(),
|
||||
Base::keys_.begin(),
|
||||
[](const VariableSlots::const_iterator& it) {return it->first;});
|
||||
gttoc(allocate);
|
||||
|
||||
// Loop over slots in combined factor and copy blocks from source factors
|
||||
|
|
|
@ -480,7 +480,7 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const {
|
|||
const size_t maxRank = min(m, n);
|
||||
|
||||
// create storage for [R d]
|
||||
typedef boost::tuple<size_t, Matrix, double> Triple;
|
||||
typedef std::tuple<size_t, Matrix, double> Triple;
|
||||
list<Triple> Rd;
|
||||
|
||||
Matrix rd(1, n + 1); // and for row of R
|
||||
|
@ -506,7 +506,7 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const {
|
|||
rd = Ab.row(*constraint_row);
|
||||
|
||||
// Construct solution (r, d, sigma)
|
||||
Rd.push_back(boost::make_tuple(j, rd, kInfinity));
|
||||
Rd.push_back(std::make_tuple(j, rd, kInfinity));
|
||||
|
||||
// exit after rank exhausted
|
||||
if (Rd.size() >= maxRank)
|
||||
|
@ -552,7 +552,7 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const {
|
|||
rd.block(0, j + 1, 1, n - j) = pseudo.transpose() * Ab.block(0, j + 1, m, n - j);
|
||||
|
||||
// construct solution (r, d, sigma)
|
||||
Rd.push_back(boost::make_tuple(j, rd, precision));
|
||||
Rd.push_back(std::make_tuple(j, rd, precision));
|
||||
} else {
|
||||
// If precision is zero, no information on this column
|
||||
// This is actually not limited to constraints, could happen in Gaussian::QR
|
||||
|
@ -577,9 +577,9 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const {
|
|||
bool mixed = false;
|
||||
Ab.setZero(); // make sure we don't look below
|
||||
for (const Triple& t: Rd) {
|
||||
const size_t& j = t.get<0>();
|
||||
const Matrix& rd = t.get<1>();
|
||||
precisions(i) = t.get<2>();
|
||||
const size_t& j = std::get<0>(t);
|
||||
const Matrix& rd = std::get<1>(t);
|
||||
precisions(i) = std::get<2>(t);
|
||||
if (std::isinf(precisions(i)))
|
||||
mixed = true;
|
||||
Ab.block(i, j, 1, n + 1 - j) = rd.block(0, j, 1, n + 1 - j);
|
||||
|
|
|
@ -14,7 +14,6 @@
|
|||
#include <gtsam/linear/NoiseModel.h>
|
||||
#include <memory>
|
||||
#include <boost/algorithm/string.hpp>
|
||||
#include <boost/range/adaptor/map.hpp>
|
||||
#include <iostream>
|
||||
#include <vector>
|
||||
|
||||
|
@ -145,8 +144,9 @@ void BlockJacobiPreconditioner::build(
|
|||
|
||||
/* getting the block diagonals over the factors */
|
||||
std::map<Key, Matrix> hessianMap =gfg.hessianBlockDiagonal();
|
||||
for (const Matrix& hessian: hessianMap | boost::adaptors::map_values)
|
||||
for (const auto& [key, hessian]: hessianMap) {
|
||||
blocks.push_back(hessian);
|
||||
}
|
||||
|
||||
/* if necessary, allocating the memory for cacheing the factorization results */
|
||||
if ( nnz > bufferSize_ ) {
|
||||
|
|
|
@ -25,7 +25,6 @@
|
|||
#include <gtsam/base/Vector.h>
|
||||
|
||||
#include <boost/algorithm/string.hpp>
|
||||
#include <boost/range/adaptor/reversed.hpp>
|
||||
|
||||
#include <stdexcept>
|
||||
|
||||
|
@ -205,7 +204,8 @@ void SubgraphPreconditioner::solve(const Vector &y, Vector &x) const {
|
|||
assert(x.size() == y.size());
|
||||
|
||||
/* back substitute */
|
||||
for (const auto &cg : boost::adaptors::reverse(Rc1_)) {
|
||||
for (auto it = std::make_reverse_iterator(Rc1_.end()); it != std::make_reverse_iterator(Rc1_.begin()); ++it) {
|
||||
auto& cg = *it;
|
||||
/* collect a subvector of x that consists of the parents of cg (S) */
|
||||
const KeyVector parentKeys(cg->beginParents(), cg->endParents());
|
||||
const KeyVector frontalKeys(cg->beginFrontals(), cg->endFrontals());
|
||||
|
|
|
@ -19,20 +19,12 @@
|
|||
#include <gtsam/linear/VectorValues.h>
|
||||
|
||||
#include <boost/bind/bind.hpp>
|
||||
#include <boost/range/combine.hpp>
|
||||
#include <boost/range/numeric.hpp>
|
||||
#include <boost/range/adaptor/transformed.hpp>
|
||||
#include <boost/range/adaptor/map.hpp>
|
||||
|
||||
using namespace std;
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
using boost::combine;
|
||||
using boost::adaptors::transformed;
|
||||
using boost::adaptors::map_values;
|
||||
using boost::accumulate;
|
||||
|
||||
/* ************************************************************************ */
|
||||
VectorValues::VectorValues(const VectorValues& first, const VectorValues& second)
|
||||
{
|
||||
|
@ -46,12 +38,8 @@ namespace gtsam {
|
|||
|
||||
/* ************************************************************************ */
|
||||
VectorValues::VectorValues(const Vector& x, const Dims& dims) {
|
||||
using Pair = pair<const Key, size_t>;
|
||||
size_t j = 0;
|
||||
for (const Pair& v : dims) {
|
||||
Key key;
|
||||
size_t n;
|
||||
boost::tie(key, n) = v;
|
||||
for (const auto& [key,n] : dims) {
|
||||
#ifdef TBB_GREATER_EQUAL_2020
|
||||
values_.emplace(key, x.segment(j, n));
|
||||
#else
|
||||
|
@ -78,11 +66,11 @@ namespace gtsam {
|
|||
VectorValues VectorValues::Zero(const VectorValues& other)
|
||||
{
|
||||
VectorValues result;
|
||||
for(const KeyValuePair& v: other)
|
||||
for(const auto& [key,value]: other)
|
||||
#ifdef TBB_GREATER_EQUAL_2020
|
||||
result.values_.emplace(v.first, Vector::Zero(v.second.size()));
|
||||
result.values_.emplace(key, Vector::Zero(value.size()));
|
||||
#else
|
||||
result.values_.insert(std::make_pair(v.first, Vector::Zero(v.second.size())));
|
||||
result.values_.insert(std::make_pair(key, Vector::Zero(value.size())));
|
||||
#endif
|
||||
return result;
|
||||
}
|
||||
|
@ -100,18 +88,18 @@ namespace gtsam {
|
|||
/* ************************************************************************ */
|
||||
VectorValues& VectorValues::update(const VectorValues& values) {
|
||||
iterator hint = begin();
|
||||
for (const KeyValuePair& key_value : values) {
|
||||
for (const auto& [key,value] : values) {
|
||||
// Use this trick to find the value using a hint, since we are inserting
|
||||
// from another sorted map
|
||||
size_t oldSize = values_.size();
|
||||
hint = values_.insert(hint, key_value);
|
||||
hint = values_.emplace_hint(hint, key, value);
|
||||
if (values_.size() > oldSize) {
|
||||
values_.unsafe_erase(hint);
|
||||
throw out_of_range(
|
||||
"Requested to update a VectorValues with another VectorValues that "
|
||||
"contains keys not present in the first.");
|
||||
} else {
|
||||
hint->second = key_value.second;
|
||||
hint->second = value;
|
||||
}
|
||||
}
|
||||
return *this;
|
||||
|
@ -131,8 +119,9 @@ namespace gtsam {
|
|||
/* ************************************************************************ */
|
||||
void VectorValues::setZero()
|
||||
{
|
||||
for(Vector& v: values_ | map_values)
|
||||
v.setZero();
|
||||
for(auto& [key, value] : *this) {
|
||||
value.setZero();
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************ */
|
||||
|
@ -140,16 +129,15 @@ namespace gtsam {
|
|||
// Change print depending on whether we are using TBB
|
||||
#ifdef GTSAM_USE_TBB
|
||||
map<Key, Vector> sorted;
|
||||
for (const auto& key_value : v) {
|
||||
sorted.emplace(key_value.first, key_value.second);
|
||||
for (const auto& [key,value] : v) {
|
||||
sorted.emplace(key, value);
|
||||
}
|
||||
for (const auto& key_value : sorted)
|
||||
for (const auto& [key,value] : sorted)
|
||||
#else
|
||||
for (const auto& key_value : v)
|
||||
for (const auto& [key,value] : v)
|
||||
#endif
|
||||
{
|
||||
os << " " << StreamedKey(key_value.first) << ": " << key_value.second.transpose()
|
||||
<< "\n";
|
||||
os << " " << StreamedKey(key) << ": " << value.transpose() << "\n";
|
||||
}
|
||||
return os;
|
||||
}
|
||||
|
@ -166,9 +154,11 @@ namespace gtsam {
|
|||
bool VectorValues::equals(const VectorValues& x, double tol) const {
|
||||
if(this->size() != x.size())
|
||||
return false;
|
||||
for(const auto values: boost::combine(*this, x)) {
|
||||
if(values.get<0>().first != values.get<1>().first ||
|
||||
!equal_with_abs_tol(values.get<0>().second, values.get<1>().second, tol))
|
||||
auto this_it = this->begin();
|
||||
auto x_it = x.begin();
|
||||
for(; this_it != this->end(); ++this_it, ++x_it) {
|
||||
if(this_it->first != x_it->first ||
|
||||
!equal_with_abs_tol(this_it->second, x_it->second, tol))
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
|
@ -178,14 +168,15 @@ namespace gtsam {
|
|||
Vector VectorValues::vector() const {
|
||||
// Count dimensions
|
||||
DenseIndex totalDim = 0;
|
||||
for (const Vector& v : *this | map_values) totalDim += v.size();
|
||||
for (const auto& [key, value] : *this)
|
||||
totalDim += value.size();
|
||||
|
||||
// Copy vectors
|
||||
Vector result(totalDim);
|
||||
DenseIndex pos = 0;
|
||||
for (const Vector& v : *this | map_values) {
|
||||
result.segment(pos, v.size()) = v;
|
||||
pos += v.size();
|
||||
for (const auto& [key, value] : *this) {
|
||||
result.segment(pos, value.size()) = value;
|
||||
pos += value.size();
|
||||
}
|
||||
|
||||
return result;
|
||||
|
@ -196,7 +187,7 @@ namespace gtsam {
|
|||
{
|
||||
// Count dimensions
|
||||
DenseIndex totalDim = 0;
|
||||
for(size_t dim: keys | map_values)
|
||||
for (const auto& [key, dim] : keys)
|
||||
totalDim += dim;
|
||||
Vector result(totalDim);
|
||||
size_t j = 0;
|
||||
|
@ -215,19 +206,19 @@ namespace gtsam {
|
|||
/* ************************************************************************ */
|
||||
namespace internal
|
||||
{
|
||||
bool structureCompareOp(const boost::tuple<VectorValues::value_type,
|
||||
VectorValues::value_type>& vv)
|
||||
bool structureCompareOp(const VectorValues::value_type& a, const VectorValues::value_type& b)
|
||||
{
|
||||
return vv.get<0>().first == vv.get<1>().first
|
||||
&& vv.get<0>().second.size() == vv.get<1>().second.size();
|
||||
return a.first == b.first && a.second.size() == b.second.size();
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************ */
|
||||
bool VectorValues::hasSameStructure(const VectorValues other) const
|
||||
{
|
||||
return accumulate(combine(*this, other)
|
||||
| transformed(internal::structureCompareOp), true, logical_and<bool>());
|
||||
// compare the "other" container with this one, using the structureCompareOp
|
||||
// and then return true if all elements are compared as equal
|
||||
return std::equal(this->begin(), this->end(), other.begin(), other.end(),
|
||||
internal::structureCompareOp);
|
||||
}
|
||||
|
||||
/* ************************************************************************ */
|
||||
|
@ -236,14 +227,14 @@ namespace gtsam {
|
|||
if(this->size() != v.size())
|
||||
throw invalid_argument("VectorValues::dot called with a VectorValues of different structure");
|
||||
double result = 0.0;
|
||||
typedef boost::tuple<value_type, value_type> ValuePair;
|
||||
using boost::adaptors::map_values;
|
||||
for(const ValuePair values: boost::combine(*this, v)) {
|
||||
assert_throw(values.get<0>().first == values.get<1>().first,
|
||||
invalid_argument("VectorValues::dot called with a VectorValues of different structure"));
|
||||
assert_throw(values.get<0>().second.size() == values.get<1>().second.size(),
|
||||
invalid_argument("VectorValues::dot called with a VectorValues of different structure"));
|
||||
result += values.get<0>().second.dot(values.get<1>().second);
|
||||
auto this_it = this->begin();
|
||||
auto v_it = v.begin();
|
||||
for(; this_it != this->end(); ++this_it, ++v_it) {
|
||||
assert_throw(this_it->first == v_it->first,
|
||||
invalid_argument("VectorValues::dot called with a VectorValues of different structure"));
|
||||
assert_throw(this_it->second.size() == v_it->second.size(),
|
||||
invalid_argument("VectorValues::dot called with a VectorValues of different structure"));
|
||||
result += this_it->second.dot(v_it->second);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
@ -256,9 +247,9 @@ namespace gtsam {
|
|||
/* ************************************************************************ */
|
||||
double VectorValues::squaredNorm() const {
|
||||
double sumSquares = 0.0;
|
||||
using boost::adaptors::map_values;
|
||||
for(const Vector& v: *this | map_values)
|
||||
sumSquares += v.squaredNorm();
|
||||
for(const auto& [key, value]: *this) {
|
||||
sumSquares += value.squaredNorm();
|
||||
}
|
||||
return sumSquares;
|
||||
}
|
||||
|
||||
|
@ -372,8 +363,9 @@ namespace gtsam {
|
|||
/* ************************************************************************ */
|
||||
VectorValues& VectorValues::operator*=(double alpha)
|
||||
{
|
||||
for(Vector& v: *this | map_values)
|
||||
v *= alpha;
|
||||
for (auto& [key, value]: *this) {
|
||||
value *= alpha;
|
||||
}
|
||||
return *this;
|
||||
}
|
||||
|
||||
|
|
|
@ -24,7 +24,6 @@
|
|||
#include <gtsam/inference/Symbol.h>
|
||||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
#include <boost/tuple/tuple.hpp>
|
||||
#include <boost/bind/bind.hpp>
|
||||
|
||||
// STL/C++
|
||||
|
@ -52,7 +51,7 @@ static GaussianBayesNet noisyBayesNet = {
|
|||
TEST( GaussianBayesNet, Matrix )
|
||||
{
|
||||
Matrix R; Vector d;
|
||||
boost::tie(R,d) = smallBayesNet.matrix(); // find matrix and RHS
|
||||
std::tie(R,d) = smallBayesNet.matrix(); // find matrix and RHS
|
||||
|
||||
Matrix R1 = (Matrix2() <<
|
||||
1.0, 1.0,
|
||||
|
@ -102,7 +101,7 @@ TEST(GaussianBayesNet, Evaluate2) {
|
|||
TEST( GaussianBayesNet, NoisyMatrix )
|
||||
{
|
||||
Matrix R; Vector d;
|
||||
boost::tie(R,d) = noisyBayesNet.matrix(); // find matrix and RHS
|
||||
std::tie(R,d) = noisyBayesNet.matrix(); // find matrix and RHS
|
||||
|
||||
Matrix R1 = (Matrix2() <<
|
||||
0.5, 0.5,
|
||||
|
@ -126,7 +125,7 @@ TEST(GaussianBayesNet, Optimize) {
|
|||
TEST(GaussianBayesNet, NoisyOptimize) {
|
||||
Matrix R;
|
||||
Vector d;
|
||||
boost::tie(R, d) = noisyBayesNet.matrix(); // find matrix and RHS
|
||||
std::tie(R, d) = noisyBayesNet.matrix(); // find matrix and RHS
|
||||
const Vector x = R.inverse() * d;
|
||||
const VectorValues expected{{_x_, x.head(1)}, {_y_, x.tail(1)}};
|
||||
|
||||
|
@ -239,7 +238,7 @@ TEST( GaussianBayesNet, MatrixStress )
|
|||
const Ordering ordering(keys);
|
||||
Matrix R;
|
||||
Vector d;
|
||||
boost::tie(R, d) = bn.matrix(ordering);
|
||||
std::tie(R, d) = bn.matrix(ordering);
|
||||
EXPECT(assert_equal(expected.vector(ordering), R.inverse() * d));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -157,7 +157,7 @@ TEST(GaussianFactorGraph, matrices) {
Vector b = Ab.col(Ab.cols() - 1);
Matrix actualA;
Vector actualb;
boost::tie(actualA, actualb) = gfg.jacobian();
std::tie(actualA, actualb) = gfg.jacobian();
EXPECT(assert_equal(A, actualA));
EXPECT(assert_equal(b, actualb));

@ -166,7 +166,7 @@ TEST(GaussianFactorGraph, matrices) {
Vector eta = A.transpose() * b;
Matrix actualL;
Vector actualeta;
boost::tie(actualL, actualeta) = gfg.hessian();
std::tie(actualL, actualeta) = gfg.hessian();
EXPECT(assert_equal(L, actualL));
EXPECT(assert_equal(eta, actualeta));

@ -247,7 +247,7 @@ TEST(GaussianFactorGraph, eliminate_empty) {
gfg.add(JacobianFactor());
GaussianBayesNet::shared_ptr actualBN;
GaussianFactorGraph::shared_ptr remainingGFG;
boost::tie(actualBN, remainingGFG) = gfg.eliminatePartialSequential(Ordering());
std::tie(actualBN, remainingGFG) = gfg.eliminatePartialSequential(Ordering());

// expected Bayes net is empty
GaussianBayesNet expectedBN;
@ -265,10 +265,10 @@ TEST(GaussianFactorGraph, matrices2) {
GaussianFactorGraph gfg = createSimpleGaussianFactorGraph();
Matrix A;
Vector b;
boost::tie(A, b) = gfg.jacobian();
std::tie(A, b) = gfg.jacobian();
Matrix AtA;
Vector eta;
boost::tie(AtA, eta) = gfg.hessian();
std::tie(AtA, eta) = gfg.hessian();
EXPECT(assert_equal(A.transpose() * A, AtA));
EXPECT(assert_equal(A.transpose() * b, eta));
Matrix expectedAtA(6, 6);
@ -316,7 +316,7 @@ TEST(GaussianFactorGraph, multiplyHessianAdd2) {
// brute force
Matrix AtA;
Vector eta;
boost::tie(AtA, eta) = gfg.hessian();
std::tie(AtA, eta) = gfg.hessian();
Vector X(6);
X << 1, 2, 3, 4, 5, 6;
Vector Y(6);
@ -343,10 +343,10 @@ TEST(GaussianFactorGraph, matricesMixed) {
GaussianFactorGraph gfg = createGaussianFactorGraphWithHessianFactor();
Matrix A;
Vector b;
boost::tie(A, b) = gfg.jacobian(); // incorrect !
std::tie(A, b) = gfg.jacobian(); // incorrect !
Matrix AtA;
Vector eta;
boost::tie(AtA, eta) = gfg.hessian(); // correct
std::tie(AtA, eta) = gfg.hessian(); // correct
EXPECT(assert_equal(A.transpose() * A, AtA));
Vector expected = -(Vector(6) << -25, 17.5, 5, -13.5, 29, 4).finished();
EXPECT(assert_equal(expected, eta));
@ -295,13 +295,13 @@ TEST(HessianFactor, CombineAndEliminate1) {
Ordering ordering {1};
GaussianConditional::shared_ptr expectedConditional;
JacobianFactor::shared_ptr expectedFactor;
boost::tie(expectedConditional, expectedFactor) = jacobian.eliminate(ordering);
std::tie(expectedConditional, expectedFactor) = jacobian.eliminate(ordering);
CHECK(expectedFactor);

// Eliminate
GaussianConditional::shared_ptr actualConditional;
HessianFactor::shared_ptr actualHessian;
boost::tie(actualConditional, actualHessian) = //
std::tie(actualConditional, actualHessian) = //
EliminateCholesky(gfg, ordering);
actualConditional->setModel(false,Vector3(1,1,1)); // add a unit model for comparison

@ -358,13 +358,13 @@ TEST(HessianFactor, CombineAndEliminate2) {
Ordering ordering {0};
GaussianConditional::shared_ptr expectedConditional;
JacobianFactor::shared_ptr expectedFactor;
boost::tie(expectedConditional, expectedFactor) = //
std::tie(expectedConditional, expectedFactor) = //
jacobian.eliminate(ordering);

// Eliminate
GaussianConditional::shared_ptr actualConditional;
HessianFactor::shared_ptr actualHessian;
boost::tie(actualConditional, actualHessian) = //
std::tie(actualConditional, actualHessian) = //
EliminateCholesky(gfg, ordering);
actualConditional->setModel(false,Vector3(1,1,1)); // add a unit model for comparison

@ -498,7 +498,7 @@ TEST(HessianFactor, gradientAtZero)

// test gradient at zero
VectorValues expectedG{{0, -g1}, {1, -g2}};
Matrix A; Vector b; boost::tie(A,b) = factor.jacobian();
Matrix A; Vector b; std::tie(A,b) = factor.jacobian();
KeyVector keys {0, 1};
EXPECT(assert_equal(-A.transpose()*b, expectedG.vector(keys)));
VectorValues actualG = factor.gradientAtZero();
@ -26,7 +26,6 @@
#include <gtsam/linear/VectorValues.h>

#include <boost/range/iterator_range.hpp>
#include <boost/range/adaptor/map.hpp>

using namespace std;
using namespace gtsam;
@ -36,8 +35,8 @@ using Dims = std::vector<Eigen::Index>; // For constructing block matrices
namespace {
namespace simple {
// Terms we'll use
const vector<pair<Key, Matrix> > terms{
{5, I_3x3}, {10, 2 * I_3x3}, {15, 3 * I_3x3}};
using Terms = vector<pair<Key, Matrix> >;
const Terms terms{{5, I_3x3}, {10, 2 * I_3x3}, {15, 3 * I_3x3}};

// RHS and sigmas
const Vector b = Vector3(1., 2., 3.);
@ -54,8 +53,7 @@ TEST(JacobianFactor, constructors_and_accessors)
// Test for using different numbers of terms
{
// b vector only constructor
JacobianFactor expected(
boost::make_iterator_range(terms.begin(), terms.begin()), b);
JacobianFactor expected(Terms{}, b);
JacobianFactor actual(b);
EXPECT(assert_equal(expected, actual));
EXPECT(assert_equal(b, expected.getb()));
@ -65,8 +63,7 @@ TEST(JacobianFactor, constructors_and_accessors)
}
{
// One term constructor
JacobianFactor expected(
boost::make_iterator_range(terms.begin(), terms.begin() + 1), b, noise);
JacobianFactor expected(Terms{terms[0]}, b, noise);
JacobianFactor actual(terms[0].first, terms[0].second, b, noise);
EXPECT(assert_equal(expected, actual));
LONGS_EQUAL((long)terms[0].first, (long)actual.keys().back());
@ -78,8 +75,7 @@ TEST(JacobianFactor, constructors_and_accessors)
}
{
// Two term constructor
JacobianFactor expected(
boost::make_iterator_range(terms.begin(), terms.begin() + 2), b, noise);
JacobianFactor expected(Terms{terms[0], terms[1]}, b, noise);
JacobianFactor actual(terms[0].first, terms[0].second,
terms[1].first, terms[1].second, b, noise);
EXPECT(assert_equal(expected, actual));
@ -92,8 +88,7 @@ TEST(JacobianFactor, constructors_and_accessors)
}
{
// Three term constructor
JacobianFactor expected(
boost::make_iterator_range(terms.begin(), terms.begin() + 3), b, noise);
JacobianFactor expected(Terms{terms[0], terms[1], terms[2]}, b, noise);
JacobianFactor actual(terms[0].first, terms[0].second,
terms[1].first, terms[1].second, terms[2].first, terms[2].second, b, noise);
EXPECT(assert_equal(expected, actual));
@ -106,8 +101,7 @@ TEST(JacobianFactor, constructors_and_accessors)
}
{
// Test three-term constructor with std::map
JacobianFactor expected(
boost::make_iterator_range(terms.begin(), terms.begin() + 3), b, noise);
JacobianFactor expected(Terms{terms[0], terms[1], terms[2]}, b, noise);
map<Key,Matrix> mapTerms;
// note order of insertion plays no role: order will be determined by keys
mapTerms.insert(terms[2]);
@ -124,14 +118,17 @@ TEST(JacobianFactor, constructors_and_accessors)
}
{
// VerticalBlockMatrix constructor
JacobianFactor expected(
boost::make_iterator_range(terms.begin(), terms.begin() + 3), b, noise);
JacobianFactor expected(Terms{terms[0], terms[1], terms[2]}, b, noise);
VerticalBlockMatrix blockMatrix(Dims{3, 3, 3, 1}, 3);
blockMatrix(0) = terms[0].second;
blockMatrix(1) = terms[1].second;
blockMatrix(2) = terms[2].second;
blockMatrix(3) = b;
JacobianFactor actual(terms | boost::adaptors::map_keys, blockMatrix, noise);
// get a vector of keys from the terms
vector<Key> keys;
for (const auto& term : terms)
keys.push_back(term.first);
JacobianFactor actual(keys, blockMatrix, noise);
EXPECT(assert_equal(expected, actual));
LONGS_EQUAL((long)terms[2].first, (long)actual.keys().back());
EXPECT(assert_equal(terms[2].second, actual.getA(actual.end() - 1)));
@ -371,7 +368,7 @@ TEST(JacobianFactor, operators )
EXPECT(assert_equal(expectedX, actualX));

// test gradient at zero
Matrix A; Vector b2; boost::tie(A,b2) = lf.jacobian();
Matrix A; Vector b2; std::tie(A,b2) = lf.jacobian();
VectorValues expectedG;
expectedG.insert(1, Vector2(20,-10));
expectedG.insert(2, Vector2(-20, 10));
@ -24,9 +24,6 @@

#include <CppUnitLite/TestHarness.h>

#include <boost/range/iterator_range.hpp>
#include <boost/range/adaptor/map.hpp>

using namespace std;
using namespace gtsam;
@ -21,12 +21,9 @@

#include <CppUnitLite/TestHarness.h>

#include <boost/range/adaptor/map.hpp>

#include <sstream>

using namespace std;
using boost::adaptors::map_keys;
using namespace gtsam;

/* ************************************************************************* */
@ -68,7 +68,7 @@ void ManifoldPreintegration::update(const Vector3& measuredAcc,
// Possibly correct for sensor pose
Matrix3 D_correctedAcc_acc, D_correctedAcc_omega, D_correctedOmega_omega;
if (p().body_P_sensor)
boost::tie(acc, omega) = correctMeasurementsBySensorPose(acc, omega,
std::tie(acc, omega) = correctMeasurementsBySensorPose(acc, omega,
D_correctedAcc_acc, D_correctedAcc_omega, D_correctedOmega_omega);

// Save current rotation for updating Jacobians
@ -112,7 +112,7 @@ void TangentPreintegration::update(const Vector3& measuredAcc,
// Possibly correct for sensor pose by converting to body frame
Matrix3 D_correctedAcc_acc, D_correctedAcc_omega, D_correctedOmega_omega;
if (p().body_P_sensor)
boost::tie(acc, omega) = correctMeasurementsBySensorPose(acc, omega,
std::tie(acc, omega) = correctMeasurementsBySensorPose(acc, omega,
D_correctedAcc_acc, D_correctedAcc_omega, D_correctedOmega_omega);

// Do update
@ -125,7 +125,7 @@ TEST(GPSData, init) {
// Estimate initial state
Pose3 T;
Vector3 nV;
boost::tie(T, nV) = GPSFactor::EstimateState(t1, NED1, t2, NED2, 84831.0796);
std::tie(T, nV) = GPSFactor::EstimateState(t1, NED1, t2, NED2, 84831.0796);

// Check values values
EXPECT(assert_equal((Vector )Vector3(29.9575, -29.0564, -1.95993), nV, 1e-4));
@ -21,11 +21,6 @@

#include <gtsam/nonlinear/internal/ExpressionNode.h>

#include <boost/bind/bind.hpp>
#include <boost/tuple/tuple.hpp>
#include <boost/range/adaptor/map.hpp>
#include <boost/range/algorithm.hpp>

namespace gtsam {

template<typename T>
@ -143,7 +138,7 @@ T Expression<T>::value(const Values& values,
// Call private version that returns derivatives in H
KeyVector keys;
FastVector<int> dims;
boost::tie(keys, dims) = keysAndDims();
std::tie(keys, dims) = keysAndDims();
return valueAndDerivatives(values, keys, dims, *H);
} else
// no derivatives needed, just return value
@ -229,8 +224,13 @@ typename Expression<T>::KeysAndDims Expression<T>::keysAndDims() const {
dims(map);
size_t n = map.size();
KeysAndDims pair = std::make_pair(KeyVector(n), FastVector<int>(n));
boost::copy(map | boost::adaptors::map_keys, pair.first.begin());
boost::copy(map | boost::adaptors::map_values, pair.second.begin());
// Copy map into pair of vectors
auto key_it = pair.first.begin();
auto dim_it = pair.second.begin();
for (const auto& [key, value] : map) {
*key_it++ = key;
*dim_it++ = value;
}
return pair;
}

@ -180,7 +180,7 @@ protected:
if (keys_.empty()) {
// This is the case when called in ExpressionFactor Constructor.
// We then take the keys from the expression in sorted order.
boost::tie(keys_, dims_) = expression_.keysAndDims();
std::tie(keys_, dims_) = expression_.keysAndDims();
} else {
// This happens with classes derived from BinaryExpressionFactor etc.
// In that case, the keys_ are already defined and we just need to grab
@ -22,7 +22,6 @@
#include <gtsam/inference/Symbol.h> // for selective linearization thresholds
#include <gtsam/nonlinear/ISAM2-impl.h>

#include <boost/range/adaptors.hpp>
#include <functional>
#include <limits>
#include <string>
@ -28,13 +28,6 @@
#include <gtsam/linear/GaussianBayesTree.h>
#include <gtsam/linear/GaussianEliminationTree.h>

#include <boost/range/adaptors.hpp>
#include <boost/range/algorithm/copy.hpp>
namespace br {
using namespace boost::range;
using namespace boost::adaptors;
} // namespace br

#include <algorithm>
#include <limits>
#include <string>
@ -176,9 +176,11 @@ void ISAM2::recalculateBatch(const ISAM2UpdateParams& updateParams,
gttic(recalculateBatch);

gttic(add_keys);
br::copy(variableIndex_ | br::map_keys,
std::inserter(*affectedKeysSet, affectedKeysSet->end()));

// copy the keys from the variableIndex_ to the affectedKeysSet
for (const auto& [key, _] : variableIndex_) {
affectedKeysSet->insert(key);
}
// Removed unused keys:
VariableIndex affectedFactorsVarIndex = variableIndex_;

@ -29,7 +29,6 @@
#include <gtsam/base/timing.h>

#include <boost/format.hpp>
#include <boost/range/adaptor/map.hpp>

#include <cmath>
#include <fstream>
@ -41,7 +40,6 @@ using namespace std;

namespace gtsam {

using boost::adaptors::map_values;
typedef internal::LevenbergMarquardtState State;

/* ************************************************************************* */
@ -281,8 +279,8 @@ GaussianFactorGraph::shared_ptr LevenbergMarquardtOptimizer::iterate() {
VectorValues sqrtHessianDiagonal;
if (params_.diagonalDamping) {
sqrtHessianDiagonal = linear->hessianDiagonal();
for (Vector& v : sqrtHessianDiagonal | map_values) {
v = v.cwiseMax(params_.minDiagonal).cwiseMin(params_.maxDiagonal).cwiseSqrt();
for (auto& [key, value] : sqrtHessianDiagonal) {
value = value.cwiseMax(params_.minDiagonal).cwiseMin(params_.maxDiagonal).cwiseSqrt();
}
}
@ -20,7 +20,6 @@

#include <gtsam/nonlinear/LevenbergMarquardtParams.h>
#include <boost/algorithm/string/case_conv.hpp>
#include <boost/range/adaptor/map.hpp>
#include <iostream>
#include <string>

@ -65,7 +65,7 @@ NonlinearConjugateGradientOptimizer::System::State NonlinearConjugateGradientOpt
GaussianFactorGraph::shared_ptr NonlinearConjugateGradientOptimizer::iterate() {
Values newValues;
int dummy;
boost::tie(newValues, dummy) = nonlinearConjugateGradient<System, Values>(
std::tie(newValues, dummy) = nonlinearConjugateGradient<System, Values>(
System(graph_), state_->values, params_, true /* single iteration */);
state_.reset(new State(newValues, graph_.error(newValues), state_->iterations + 1));

@ -78,7 +78,7 @@ const Values& NonlinearConjugateGradientOptimizer::optimize() {
System system(graph_);
Values newValues;
int iterations;
boost::tie(newValues, iterations) =
std::tie(newValues, iterations) =
nonlinearConjugateGradient(system, state_->values, params_, false);
state_.reset(new State(std::move(newValues), graph_.error(newValues), iterations));
return state_->values;
@ -20,7 +20,6 @@

#include <gtsam/base/Manifold.h>
#include <gtsam/nonlinear/NonlinearOptimizer.h>
#include <boost/tuple/tuple.hpp>

namespace gtsam {

@ -145,7 +144,7 @@ double lineSearch(const S &system, const V currentValues, const W &gradient) {
* The last parameter is a switch between gradient-descent and conjugate gradient
*/
template<class S, class V>
boost::tuple<V, int> nonlinearConjugateGradient(const S &system,
std::tuple<V, int> nonlinearConjugateGradient(const S &system,
const V &initial, const NonlinearOptimizerParams &params,
const bool singleIteration, const bool gradientDescent = false) {

@ -160,7 +159,7 @@ boost::tuple<V, int> nonlinearConjugateGradient(const S &system,
std::cout << "Exiting, as error = " << currentError << " < "
<< params.errorTol << std::endl;
}
return boost::tie(initial, iteration);
return std::tie(initial, iteration);
}

V currentValues = initial;
@ -218,7 +217,7 @@ boost::tuple<V, int> nonlinearConjugateGradient(const S &system,
<< "nonlinearConjugateGradient: Terminating because reached maximum iterations"
<< std::endl;

return boost::tie(currentValues, iteration);
return std::tie(currentValues, iteration);
}

} // \ namespace gtsam
@ -48,7 +48,7 @@ public:
GaussianFactorGraph::shared_ptr fg;
KeyVector variables;
variables.push_back(pointKey);
boost::tie(bn, fg) = gfg.eliminatePartialSequential(variables, EliminateQR);
std::tie(bn, fg) = gfg.eliminatePartialSequential(variables, EliminateQR);
//fg->print("fg");

JacobianFactor::operator=(JacobianFactor(*fg));
@ -94,7 +94,7 @@ TEST(dataSet, load2D) {
const string filename = findExampleDataFile("w100.graph");
NonlinearFactorGraph::shared_ptr graph;
Values::shared_ptr initial;
boost::tie(graph, initial) = load2D(filename);
std::tie(graph, initial) = load2D(filename);
EXPECT_LONGS_EQUAL(300, graph->size());
EXPECT_LONGS_EQUAL(100, initial->size());
auto model = noiseModel::Unit::Create(3);
@ -139,13 +139,13 @@ TEST(dataSet, load2DVictoriaPark) {
Values::shared_ptr initial;

// Load all
boost::tie(graph, initial) = load2D(filename);
std::tie(graph, initial) = load2D(filename);
EXPECT_LONGS_EQUAL(10608, graph->size());
EXPECT_LONGS_EQUAL(7120, initial->size());

// Restrict keys
size_t maxIndex = 5;
boost::tie(graph, initial) = load2D(filename, nullptr, maxIndex);
std::tie(graph, initial) = load2D(filename, nullptr, maxIndex);
EXPECT_LONGS_EQUAL(5, graph->size());
EXPECT_LONGS_EQUAL(6, initial->size()); // file has 0 as well
EXPECT_LONGS_EQUAL(L(5), graph->at(4)->keys()[1]);
@ -221,7 +221,7 @@ TEST(dataSet, readG2o3D) {
NonlinearFactorGraph::shared_ptr actualGraph;
Values::shared_ptr actualValues;
bool is3D = true;
boost::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D);
std::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D);
EXPECT(assert_equal(expectedGraph, *actualGraph, 1e-5));
for (size_t j : {0, 1, 2, 3, 4}) {
EXPECT(assert_equal(poses[j], actualValues->at<Pose3>(j), 1e-5));
@ -235,7 +235,7 @@ TEST( dataSet, readG2o3DNonDiagonalNoise)
NonlinearFactorGraph::shared_ptr actualGraph;
Values::shared_ptr actualValues;
bool is3D = true;
boost::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D);
std::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D);

Values expectedValues;
Rot3 R0 = Rot3::Quaternion(1.000000, 0.000000, 0.000000, 0.000000 );
@ -329,7 +329,7 @@ TEST(dataSet, readG2o) {
const string g2oFile = findExampleDataFile("pose2example");
NonlinearFactorGraph::shared_ptr actualGraph;
Values::shared_ptr actualValues;
boost::tie(actualGraph, actualValues) = readG2o(g2oFile);
std::tie(actualGraph, actualValues) = readG2o(g2oFile);

auto model = noiseModel::Diagonal::Precisions(
Vector3(44.721360, 44.721360, 30.901699));
@ -356,7 +356,7 @@ TEST(dataSet, readG2oHuber) {
NonlinearFactorGraph::shared_ptr actualGraph;
Values::shared_ptr actualValues;
bool is3D = false;
boost::tie(actualGraph, actualValues) =
std::tie(actualGraph, actualValues) =
readG2o(g2oFile, is3D, KernelFunctionTypeHUBER);

auto baseModel = noiseModel::Diagonal::Precisions(
@ -373,7 +373,7 @@ TEST(dataSet, readG2oTukey) {
NonlinearFactorGraph::shared_ptr actualGraph;
Values::shared_ptr actualValues;
bool is3D = false;
boost::tie(actualGraph, actualValues) =
std::tie(actualGraph, actualValues) =
readG2o(g2oFile, is3D, KernelFunctionTypeTUKEY);

auto baseModel = noiseModel::Diagonal::Precisions(
@ -390,14 +390,14 @@ TEST( dataSet, writeG2o)
const string g2oFile = findExampleDataFile("pose2example");
NonlinearFactorGraph::shared_ptr expectedGraph;
Values::shared_ptr expectedValues;
boost::tie(expectedGraph, expectedValues) = readG2o(g2oFile);
std::tie(expectedGraph, expectedValues) = readG2o(g2oFile);

const string filenameToWrite = createRewrittenFileName(g2oFile);
writeG2o(*expectedGraph, *expectedValues, filenameToWrite);

NonlinearFactorGraph::shared_ptr actualGraph;
Values::shared_ptr actualValues;
boost::tie(actualGraph, actualValues) = readG2o(filenameToWrite);
std::tie(actualGraph, actualValues) = readG2o(filenameToWrite);
EXPECT(assert_equal(*expectedValues,*actualValues,1e-5));
EXPECT(assert_equal(*expectedGraph,*actualGraph,1e-5));
}
@ -409,14 +409,14 @@ TEST( dataSet, writeG2o3D)
NonlinearFactorGraph::shared_ptr expectedGraph;
Values::shared_ptr expectedValues;
bool is3D = true;
boost::tie(expectedGraph, expectedValues) = readG2o(g2oFile, is3D);
std::tie(expectedGraph, expectedValues) = readG2o(g2oFile, is3D);

const string filenameToWrite = createRewrittenFileName(g2oFile);
writeG2o(*expectedGraph, *expectedValues, filenameToWrite);

NonlinearFactorGraph::shared_ptr actualGraph;
Values::shared_ptr actualValues;
boost::tie(actualGraph, actualValues) = readG2o(filenameToWrite, is3D);
std::tie(actualGraph, actualValues) = readG2o(filenameToWrite, is3D);
EXPECT(assert_equal(*expectedValues,*actualValues,1e-4));
EXPECT(assert_equal(*expectedGraph,*actualGraph,1e-4));
}
@ -428,14 +428,14 @@ TEST( dataSet, writeG2o3DNonDiagonalNoise)
NonlinearFactorGraph::shared_ptr expectedGraph;
Values::shared_ptr expectedValues;
bool is3D = true;
boost::tie(expectedGraph, expectedValues) = readG2o(g2oFile, is3D);
std::tie(expectedGraph, expectedValues) = readG2o(g2oFile, is3D);

const string filenameToWrite = createRewrittenFileName(g2oFile);
writeG2o(*expectedGraph, *expectedValues, filenameToWrite);

NonlinearFactorGraph::shared_ptr actualGraph;
Values::shared_ptr actualValues;
boost::tie(actualGraph, actualValues) = readG2o(filenameToWrite, is3D);
std::tie(actualGraph, actualValues) = readG2o(filenameToWrite, is3D);
EXPECT(assert_equal(*expectedValues,*actualValues,1e-4));
EXPECT(assert_equal(*expectedGraph,*actualGraph,1e-4));
}
@ -36,7 +36,7 @@ TEST(InitializePose3, computePoses2D) {
NonlinearFactorGraph::shared_ptr inputGraph;
Values::shared_ptr posesInFile;
bool is3D = false;
boost::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D);
std::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D);

auto priorModel = noiseModel::Unit::Create(3);
inputGraph->addPrior(0, posesInFile->at<Pose2>(0), priorModel);
@ -59,7 +59,7 @@ TEST(InitializePose3, computePoses3D) {
NonlinearFactorGraph::shared_ptr inputGraph;
Values::shared_ptr posesInFile;
bool is3D = true;
boost::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D);
std::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D);

auto priorModel = noiseModel::Unit::Create(6);
inputGraph->addPrior(0, posesInFile->at<Pose3>(0), priorModel);
@ -234,7 +234,7 @@ TEST( InitializePose3, orientationsGradient ) {
NonlinearFactorGraph::shared_ptr matlabGraph;
Values::shared_ptr matlabValues;
bool is3D = true;
boost::tie(matlabGraph, matlabValues) = readG2o(matlabResultsfile, is3D);
std::tie(matlabGraph, matlabValues) = readG2o(matlabResultsfile, is3D);

Rot3 R0Expected = matlabValues->at<Pose3>(1).rotation();
EXPECT(assert_equal(R0Expected, orientations.at<Rot3>(x0), 1e-4));
@ -269,7 +269,7 @@ TEST(InitializePose3, initializePoses) {
NonlinearFactorGraph::shared_ptr inputGraph;
Values::shared_ptr posesInFile;
bool is3D = true;
boost::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D);
std::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D);

auto priorModel = noiseModel::Unit::Create(6);
inputGraph->addPrior(0, Pose3(), priorModel);
@ -260,7 +260,7 @@ TEST( Lago, largeGraphNoisy_orientations ) {
string inputFile = findExampleDataFile("noisyToyGraph");
NonlinearFactorGraph::shared_ptr g;
Values::shared_ptr initial;
boost::tie(g, initial) = readG2o(inputFile);
std::tie(g, initial) = readG2o(inputFile);

// Add prior on the pose having index (key) = 0
NonlinearFactorGraph graphWithPrior = *g;
@ -281,7 +281,7 @@ TEST( Lago, largeGraphNoisy_orientations ) {
string matlabFile = findExampleDataFile("orientationsNoisyToyGraph");
NonlinearFactorGraph::shared_ptr gmatlab;
Values::shared_ptr expected;
boost::tie(gmatlab, expected) = readG2o(matlabFile);
std::tie(gmatlab, expected) = readG2o(matlabFile);

for(const auto& key_pose: expected->extract<Pose2>()){
const Key& k = key_pose.first;
@ -296,7 +296,7 @@ TEST( Lago, largeGraphNoisy ) {
string inputFile = findExampleDataFile("noisyToyGraph");
NonlinearFactorGraph::shared_ptr g;
Values::shared_ptr initial;
boost::tie(g, initial) = readG2o(inputFile);
std::tie(g, initial) = readG2o(inputFile);

// Add prior on the pose having index (key) = 0
NonlinearFactorGraph graphWithPrior = *g;
@ -308,7 +308,7 @@ TEST( Lago, largeGraphNoisy ) {
string matlabFile = findExampleDataFile("optimizedNoisyToyGraph");
NonlinearFactorGraph::shared_ptr gmatlab;
Values::shared_ptr expected;
boost::tie(gmatlab, expected) = readG2o(matlabFile);
std::tie(gmatlab, expected) = readG2o(matlabFile);

for(const auto& key_pose: expected->extract<Pose2>()){
const Key& k = key_pose.first;
@ -27,8 +27,6 @@
#include <gtsam/linear/GaussianFactor.h>
#include <gtsam/base/timing.h>

#include <boost/range/iterator_range.hpp>
#include <boost/range/adaptor/map.hpp>
#include <CppUnitLite/TestHarness.h>

using namespace std;
@ -22,11 +22,10 @@
#include <gtsam/symbolic/SymbolicBayesTree.h>
#include <gtsam/symbolic/tests/symbolicExampleGraphs.h>

#include <boost/range/adaptor/indirected.hpp>
using boost::adaptors::indirected;

#include <CppUnitLite/TestHarness.h>
#include <gtsam/base/TestableAssertions.h>
#include <iterator>
#include <type_traits>

using namespace std;
using namespace gtsam;
@ -34,6 +33,24 @@ using namespace gtsam::symbol_shorthand;

static bool debug = false;

// Given a vector of shared pointers infer the type of the pointed-to objects
template<typename T>
using PointedToType = std::decay_t<decltype(**declval<T>().begin())>;

// Given a vector of shared pointers infer the type of the pointed-to objects
template<typename T>
using ValuesVector = std::vector<PointedToType<T>>;

// Return a vector of dereferenced values
template<typename T>
ValuesVector<T> deref(const T& v) {
ValuesVector<T> result;
for (auto& t : v)
result.push_back(*t);
return result;
}

/* ************************************************************************* */
TEST(SymbolicBayesTree, clear) {
SymbolicBayesTree bayesTree = asiaBayesTree;
@ -111,8 +128,7 @@ TEST(BayesTree, removePath) {
bayesTree.removePath(bayesTree[_C_], &bn, &orphans);
SymbolicFactorGraph factors(bn);
CHECK(assert_equal(expected, factors));
CHECK(assert_container_equal(expectedOrphans | indirected,
orphans | indirected));
CHECK(assert_container_equal(deref(expectedOrphans), deref(orphans)));

bayesTree = bayesTreeOrig;

@ -127,8 +143,7 @@ TEST(BayesTree, removePath) {
bayesTree.removePath(bayesTree[_E_], &bn2, &orphans2);
SymbolicFactorGraph factors2(bn2);
CHECK(assert_equal(expected2, factors2));
CHECK(assert_container_equal(expectedOrphans2 | indirected,
orphans2 | indirected));
CHECK(assert_container_equal(deref(expectedOrphans2), deref(orphans2)));
}

/* ************************************************************************* */
@ -147,8 +162,7 @@ TEST(BayesTree, removePath2) {
CHECK(assert_equal(expected, factors));
SymbolicBayesTree::Cliques expectedOrphans{bayesTree[_S_], bayesTree[_T_],
bayesTree[_X_]};
CHECK(assert_container_equal(expectedOrphans | indirected,
orphans | indirected));
CHECK(assert_container_equal(deref(expectedOrphans), deref(orphans)));
}

/* ************************************************************************* */
@ -167,8 +181,7 @@ TEST(BayesTree, removePath3) {
expected.emplace_shared<SymbolicFactor>(_T_, _E_, _L_);
CHECK(assert_equal(expected, factors));
SymbolicBayesTree::Cliques expectedOrphans{bayesTree[_S_], bayesTree[_X_]};
CHECK(assert_container_equal(expectedOrphans | indirected,
orphans | indirected));
CHECK(assert_container_equal(deref(expectedOrphans), deref(orphans)));
}

void getAllCliques(const SymbolicBayesTree::sharedClique& subtree,
@ -249,8 +262,7 @@ TEST(BayesTree, removeTop) {
CHECK(assert_equal(expected, bn));

SymbolicBayesTree::Cliques expectedOrphans{bayesTree[_T_], bayesTree[_X_]};
CHECK(assert_container_equal(expectedOrphans | indirected,
orphans | indirected));
CHECK(assert_container_equal(deref(expectedOrphans), deref(orphans)));

// Try removeTop again with a factor that should not change a thing
// std::shared_ptr<IndexFactor> newFactor2(new IndexFactor(_B_));
@ -261,8 +273,7 @@ TEST(BayesTree, removeTop) {
SymbolicFactorGraph expected2;
CHECK(assert_equal(expected2, factors2));
SymbolicBayesTree::Cliques expectedOrphans2;
CHECK(assert_container_equal(expectedOrphans2 | indirected,
orphans2 | indirected));
CHECK(assert_container_equal(deref(expectedOrphans2), deref(orphans2)));
}

/* ************************************************************************* */
@ -286,8 +297,7 @@ TEST(BayesTree, removeTop2) {
CHECK(assert_equal(expected, bn));

SymbolicBayesTree::Cliques expectedOrphans{bayesTree[_S_], bayesTree[_X_]};
CHECK(assert_container_equal(expectedOrphans | indirected,
orphans | indirected));
CHECK(assert_container_equal(deref(expectedOrphans), deref(orphans)));
}

/* ************************************************************************* */
@ -21,7 +21,6 @@
#include <gtsam/symbolic/SymbolicConditional.h>
#include <gtsam/symbolic/SymbolicFactorGraph.h>

#include <boost/tuple/tuple.hpp>

using namespace std;
using namespace gtsam;
@ -77,7 +76,7 @@ TEST(SymbolicFactor, EliminateSymbolic)

SymbolicFactor::shared_ptr actualFactor;
SymbolicConditional::shared_ptr actualConditional;
boost::tie(actualConditional, actualFactor) =
std::tie(actualConditional, actualFactor) =
EliminateSymbolic(factors, Ordering{0, 1, 2, 3});

CHECK(assert_equal(expectedConditional, *actualConditional));
@ -67,7 +67,7 @@ TEST(SymbolicFactorGraph, eliminatePartialSequential) {

SymbolicBayesNet::shared_ptr actualBayesNet;
SymbolicFactorGraph::shared_ptr actualSfg;
boost::tie(actualBayesNet, actualSfg) =
std::tie(actualBayesNet, actualSfg) =
simpleTestGraph2.eliminatePartialSequential(Ordering{0, 1});

EXPECT(assert_equal(expectedSfg, *actualSfg));
@ -75,7 +75,7 @@ TEST(SymbolicFactorGraph, eliminatePartialSequential) {

SymbolicBayesNet::shared_ptr actualBayesNet2;
SymbolicFactorGraph::shared_ptr actualSfg2;
boost::tie(actualBayesNet2, actualSfg2) =
std::tie(actualBayesNet2, actualSfg2) =
simpleTestGraph2.eliminatePartialSequential(Ordering{0, 1});

EXPECT(assert_equal(expectedSfg, *actualSfg2));
@ -108,7 +108,7 @@ TEST(SymbolicFactorGraph, eliminatePartialMultifrontal) {

SymbolicBayesTree::shared_ptr actualBayesTree;
SymbolicFactorGraph::shared_ptr actualFactorGraph;
boost::tie(actualBayesTree, actualFactorGraph) =
std::tie(actualBayesTree, actualFactorGraph) =
simpleTestGraph2.eliminatePartialMultifrontal(Ordering{4, 5});

EXPECT(assert_equal(expectedFactorGraph, *actualFactorGraph));
@ -124,7 +124,7 @@ TEST(SymbolicFactorGraph, eliminatePartialMultifrontal) {

SymbolicBayesTree::shared_ptr actualBayesTree2;
SymbolicFactorGraph::shared_ptr actualFactorGraph2;
boost::tie(actualBayesTree2, actualFactorGraph2) =
std::tie(actualBayesTree2, actualFactorGraph2) =
simpleTestGraph2.eliminatePartialMultifrontal(KeyVector{4, 5});

EXPECT(assert_equal(expectedFactorGraph, *actualFactorGraph2));
@ -11,7 +11,6 @@
#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/inference/VariableIndex.h>

#include <boost/range/adaptor/map.hpp>
#include <fstream>
#include <iostream>

@ -47,7 +46,7 @@ class LoopyBelief {
void print(const std::string& s = "") const {
cout << s << ":" << endl;
star->print("Star graph: ");
for (Key key : correctedBeliefIndices | boost::adaptors::map_keys) {
for (const auto& [key, _] : correctedBeliefIndices) {
cout << "Belief factor index for " << key << ": "
<< correctedBeliefIndices.at(key) << endl;
}
@ -71,7 +70,7 @@ class LoopyBelief {
/// print
void print(const std::string& s = "") const {
cout << s << ":" << endl;
for (Key key : starGraphs_ | boost::adaptors::map_keys) {
for (const auto& [key, _] : starGraphs_) {
starGraphs_.at(key).print((boost::format("Node %d:") % key).str());
}
}
@ -85,7 +84,7 @@ class LoopyBelief {
DiscreteFactorGraph::shared_ptr beliefs(new DiscreteFactorGraph());
std::map<Key, std::map<Key, DiscreteFactor::shared_ptr> > allMessages;
// Eliminate each star graph
for (Key key : starGraphs_ | boost::adaptors::map_keys) {
for (const auto& [key, _] : starGraphs_) {
// cout << "***** Node " << key << "*****" << endl;
// initialize belief to the unary factor from the original graph
DecisionTreeFactor::shared_ptr beliefAtKey;
@ -94,15 +93,14 @@ class LoopyBelief {
std::map<Key, DiscreteFactor::shared_ptr> messages;

// eliminate each neighbor in this star graph one by one
for (Key neighbor : starGraphs_.at(key).correctedBeliefIndices |
boost::adaptors::map_keys) {
for (const auto& [neighbor, _] : starGraphs_.at(key).correctedBeliefIndices) {
DiscreteFactorGraph subGraph;
for (size_t factor : starGraphs_.at(key).varIndex_[neighbor]) {
subGraph.push_back(starGraphs_.at(key).star->at(factor));
}
if (debug) subGraph.print("------- Subgraph:");
DiscreteFactor::shared_ptr message;
boost::tie(dummyCond, message) =
std::tie(dummyCond, message) =
EliminateDiscrete(subGraph, Ordering{neighbor});
// store the new factor into messages
messages.insert(make_pair(neighbor, message));
@ -143,11 +141,10 @@ class LoopyBelief {

// Update corrected beliefs
VariableIndex beliefFactors(*beliefs);
for (Key key : starGraphs_ | boost::adaptors::map_keys) {
for (const auto& [key, _] : starGraphs_) {
std::map<Key, DiscreteFactor::shared_ptr> messages = allMessages[key];
for (Key neighbor : starGraphs_.at(key).correctedBeliefIndices |
boost::adaptors::map_keys) {
DecisionTreeFactor correctedBelief =
for (const auto& [neighbor, _] : starGraphs_.at(key).correctedBeliefIndices) {
DecisionTreeFactor correctedBelief =
(*std::dynamic_pointer_cast<DecisionTreeFactor>(
beliefs->at(beliefFactors[key].front()))) /
(*std::dynamic_pointer_cast<DecisionTreeFactor>(
@ -175,7 +172,7 @@ class LoopyBelief {
const std::map<Key, DiscreteKey>& allDiscreteKeys) const {
StarGraphs starGraphs;
VariableIndex varIndex(graph); ///< access to all factors of each node
for (Key key : varIndex | boost::adaptors::map_keys) {
for (const auto& [key, _] : varIndex) {
// initialize to multiply with other unary factors later
DecisionTreeFactor::shared_ptr prodOfUnaries;
@ -77,7 +77,7 @@ list<TimedOdometry> readOdometry() {

// load the ranges from TD
// Time (sec) Sender / Antenna ID Receiver Node ID Range (m)
typedef boost::tuple<double, size_t, double> RangeTriple;
typedef std::tuple<double, size_t, double> RangeTriple;
vector<RangeTriple> readTriples() {
vector<RangeTriple> triples;
string tdFile = findExampleDataFile("Plaza1_TD.txt");
@ -165,7 +165,7 @@ int main(int argc, char** argv) {
//--------------------------------- odometry loop -----------------------------------------
double t;
Pose2 odometry;
boost::tie(t, odometry) = timedOdometry;
std::tie(t, odometry) = timedOdometry;
printf("step %d, time = %g\n",(int)i,t);

// add odometry factor
@ -179,9 +179,9 @@ int main(int argc, char** argv) {
landmarkEstimates.insert(i, predictedPose);

// Check if there are range factors to be added
while (k < K && t >= boost::get<0>(triples[k])) {
size_t j = boost::get<1>(triples[k]);
double range = boost::get<2>(triples[k]);
while (k < K && t >= std::get<0>(triples[k])) {
size_t j = std::get<1>(triples[k]);
double range = std::get<2>(triples[k]);
if (i > start) {
if (smart && totalCount < minK) {
try {
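In this example the RangeTriple typedef moves from boost::tuple to std::tuple, so element access switches from boost::get<N> to std::get<N>. A brief, self-contained sketch of the std-only usage; the field meanings mirror the comment in the hunk above, and the values are made up for illustration:

#include <tuple>
#include <cstddef>
#include <cstdio>

typedef std::tuple<double, std::size_t, double> RangeTriple;

int main() {
  RangeTriple triple{12.5, 3, 7.25};  // time (sec), sender id, range (m) - example values only
  double t = std::get<0>(triple);
  std::size_t j = std::get<1>(triple);
  double range = std::get<2>(triple);
  std::printf("t=%.2f, id=%zu, range=%.2f\n", t, j, range);
  return 0;
}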
@ -76,7 +76,7 @@ list<TimedOdometry> readOdometry() {

// load the ranges from TD
// Time (sec) Sender / Antenna ID Receiver Node ID Range (m)
typedef boost::tuple<double, size_t, double> RangeTriple;
typedef std::tuple<double, size_t, double> RangeTriple;
vector<RangeTriple> readTriples() {
vector<RangeTriple> triples;
string tdFile = findExampleDataFile("Plaza2_TD.txt");
@ -146,7 +146,7 @@ int main(int argc, char** argv) {
//--------------------------------- odometry loop -----------------------------------------
double t;
Pose2 odometry;
boost::tie(t, odometry) = timedOdometry;
std::tie(t, odometry) = timedOdometry;

// add odometry factor
newFactors.push_back(
@ -160,9 +160,9 @@ int main(int argc, char** argv) {
landmarkEstimates.insert(i, predictedPose);

// Check if there are range factors to be added
while (k < K && t >= boost::get<0>(triples[k])) {
size_t j = boost::get<1>(triples[k]);
double range = boost::get<2>(triples[k]);
while (k < K && t >= std::get<0>(triples[k])) {
size_t j = std::get<1>(triples[k]);
double range = std::get<2>(triples[k]);
RangeFactor<Pose2, Point2> factor(i, symbol('L', j), range, rangeNoise);
// Throw out obvious outliers based on current landmark estimates
Vector error = factor.unwhitenedError(landmarkEstimates);
@ -132,7 +132,7 @@ TEST(InvDepthFactor, backproject)

Vector5 actual_vec;
double actual_inv;
boost::tie(actual_vec, actual_inv) = inv_camera.backproject(z, 4);
std::tie(actual_vec, actual_inv) = inv_camera.backproject(z, 4);
EXPECT(assert_equal(expected,actual_vec,1e-7));
EXPECT_DOUBLES_EQUAL(inv_depth,actual_inv,1e-7);
}
@ -148,7 +148,7 @@ TEST(InvDepthFactor, backproject2)

Vector5 actual_vec;
double actual_inv;
boost::tie(actual_vec, actual_inv) = inv_camera.backproject(z, 10);
std::tie(actual_vec, actual_inv) = inv_camera.backproject(z, 10);
EXPECT(assert_equal(expected,actual_vec,1e-7));
EXPECT_DOUBLES_EQUAL(inv_depth,actual_inv,1e-7);
}
@ -45,7 +45,7 @@ namespace gtsam {
*
* We want the minimum of all those alphas among all inactive inequality.
*/
Template boost::tuple<double, int> This::computeStepSize(
Template std::tuple<double, int> This::computeStepSize(
const InequalityFactorGraph& workingSet, const VectorValues& xk,
const VectorValues& p, const double& maxAlpha) const {
double minAlpha = maxAlpha;
@ -74,7 +74,7 @@ Template boost::tuple<double, int> This::computeStepSize(
}
}
}
return boost::make_tuple(minAlpha, closestFactorIx);
return std::make_tuple(minAlpha, closestFactorIx);
}

/******************************************************************************/
@ -222,7 +222,7 @@ Template typename This::State This::iterate(
double alpha;
int factorIx;
VectorValues p = newValues - state.values;
boost::tie(alpha, factorIx) = // using 16.41
std::tie(alpha, factorIx) = // using 16.41
computeStepSize(state.workingSet, state.values, p, POLICY::maxAlpha);
// also add to the working set the one that complains the most
InequalityFactorGraph newWorkingSet = state.workingSet;
@ -20,7 +20,6 @@

#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam_unstable/linear/InequalityFactorGraph.h>
#include <boost/range/adaptor/map.hpp>

namespace gtsam {

@ -114,7 +113,7 @@ protected:
* If there is a blocking constraint, the closest one will be added to the
* working set and become active in the next iteration.
*/
boost::tuple<double, int> computeStepSize(
std::tuple<double, int> computeStepSize(
const InequalityFactorGraph& workingSet, const VectorValues& xk,
const VectorValues& p, const double& maxAlpha) const;

@ -201,4 +200,4 @@ Key maxKey(const PROBLEM& problem) {

} // namespace gtsam

#include <gtsam_unstable/linear/ActiveSetSolver-inl.h>
#include <gtsam_unstable/linear/ActiveSetSolver-inl.h>
@ -66,7 +66,7 @@ GaussianFactorGraph::shared_ptr LPInitSolver::buildInitOfInitGraph() const {

// create factor ||x||^2 and add to the graph
const KeyDimMap& constrainedKeyDim = lp_.constrainedKeyDimMap();
for (Key key : constrainedKeyDim | boost::adaptors::map_keys) {
for (const auto& [key, _] : constrainedKeyDim) {
size_t dim = constrainedKeyDim.at(key);
initGraph->push_back(
JacobianFactor(key, Matrix::Identity(dim, dim), Vector::Zero(dim)));
@ -107,4 +107,4 @@ InequalityFactorGraph LPInitSolver::addSlackVariableToInequalities(
return slackInequalities;
}

}
}
@ -30,9 +30,6 @@

#include <CppUnitLite/TestHarness.h>

#include <boost/foreach.hpp>
#include <boost/range/adaptor/map.hpp>

using namespace std;
using namespace gtsam;
using namespace gtsam::symbol_shorthand;
@ -73,7 +70,7 @@ TEST(LPInitSolver, InfiniteLoopSingleVar) {
VectorValues starter;
starter.insert(1, Vector3(0, 0, 2));
VectorValues results, duals;
boost::tie(results, duals) = solver.optimize(starter);
std::tie(results, duals) = solver.optimize(starter);
VectorValues expected;
expected.insert(1, Vector3(13.5, 6.5, -6.5));
CHECK(assert_equal(results, expected, 1e-7));
@ -101,7 +98,7 @@ TEST(LPInitSolver, InfiniteLoopMultiVar) {
starter.insert(Y, kZero);
starter.insert(Z, Vector::Constant(1, 2.0));
VectorValues results, duals;
boost::tie(results, duals) = solver.optimize(starter);
std::tie(results, duals) = solver.optimize(starter);
VectorValues expected;
expected.insert(X, Vector::Constant(1, 13.5));
expected.insert(Y, Vector::Constant(1, 6.5));
@ -201,7 +198,7 @@ TEST(LPSolver, SimpleTest1) {
CHECK(assert_equal(expected_x1, x1, 1e-10));

VectorValues result, duals;
boost::tie(result, duals) = lpSolver.optimize(init);
std::tie(result, duals) = lpSolver.optimize(init);
VectorValues expectedResult;
expectedResult.insert(1, Vector2(8. / 3., 2. / 3.));
CHECK(assert_equal(expectedResult, result, 1e-10));
@ -213,7 +210,7 @@ TEST(LPSolver, TestWithoutInitialValues) {
LPSolver lpSolver(lp);
VectorValues result, duals, expectedResult;
expectedResult.insert(1, Vector2(8. / 3., 2. / 3.));
boost::tie(result, duals) = lpSolver.optimize();
std::tie(result, duals) = lpSolver.optimize();
CHECK(assert_equal(expectedResult, result));
}
@ -29,8 +29,9 @@ GTSAM_CONCEPT_TESTABLE_INST(LinearEquality)
namespace {
namespace simple {
// Terms we'll use
const vector<pair<Key, Matrix> > terms{
make_pair(5, I_3x3), make_pair(10, 2 * I_3x3), make_pair(15, 3 * I_3x3)};
using Terms = vector<pair<Key, Matrix> >;
const Terms terms{make_pair(5, I_3x3), make_pair(10, 2 * I_3x3),
make_pair(15, 3 * I_3x3)};

// RHS and sigmas
const Vector b = (Vector(3) << 1., 2., 3.).finished();
@ -45,8 +46,7 @@ TEST(LinearEquality, constructors_and_accessors) {
// Test for using different numbers of terms
{
// One term constructor
LinearEquality expected(
boost::make_iterator_range(terms.begin(), terms.begin() + 1), b, 0);
LinearEquality expected(Terms(terms.begin(), terms.begin() + 1), b, 0);
LinearEquality actual(terms[0].first, terms[0].second, b, 0);
EXPECT(assert_equal(expected, actual));
LONGS_EQUAL((long)terms[0].first, (long)actual.keys().back());
@ -57,8 +57,7 @@ TEST(LinearEquality, constructors_and_accessors) {
}
{
// Two term constructor
LinearEquality expected(
boost::make_iterator_range(terms.begin(), terms.begin() + 2), b, 0);
LinearEquality expected(Terms(terms.begin(), terms.begin() + 2), b, 0);
LinearEquality actual(terms[0].first, terms[0].second, terms[1].first,
terms[1].second, b, 0);
EXPECT(assert_equal(expected, actual));
@ -70,8 +69,7 @@ TEST(LinearEquality, constructors_and_accessors) {
}
{
// Three term constructor
LinearEquality expected(
boost::make_iterator_range(terms.begin(), terms.begin() + 3), b, 0);
LinearEquality expected(Terms(terms.begin(), terms.begin() + 3), b, 0);
LinearEquality actual(terms[0].first, terms[0].second, terms[1].first,
terms[1].second, terms[2].first, terms[2].second, b,
0);
@ -204,7 +202,7 @@ TEST(LinearEquality, operators) {
// test gradient at zero
Matrix A;
Vector b2;
boost::tie(A, b2) = lf.jacobian();
std::tie(A, b2) = lf.jacobian();
VectorValues expectedG;
expectedG.insert(1, (Vector(2) << 0.2, -0.1).finished());
expectedG.insert(2, (Vector(2) << -0.2, 0.1).finished());
@ -14,7 +14,6 @@
#include <iostream>
#include <vector>
#include <optional>
#include <boost/tuple/tuple.hpp>
#include <boost/shared_array.hpp>

#include <gtsam/base/timing.h>
@ -252,7 +251,7 @@ namespace gtsam { namespace partition {
// run ND on the graph
size_t sepsize;
sharedInts part;
boost::tie(sepsize, part) = separatorMetis(numKeys, xadj, adjncy, adjwgt, verbose);
std::tie(sepsize, part) = separatorMetis(numKeys, xadj, adjncy, adjwgt, verbose);
if (!sepsize) return std::optional<MetisResult>();

// convert the 0-1-2 from Metis to 1-2-0, so that the separator is 0, as later
@ -312,7 +311,7 @@ namespace gtsam { namespace partition {
// run metis on the graph
int edgecut;
sharedInts part;
boost::tie(edgecut, part) = edgeMetis(numKeys, xadj, adjncy, adjwgt, verbose);
std::tie(edgecut, part) = edgeMetis(numKeys, xadj, adjncy, adjwgt, verbose);

// convert the 0-1-2 from Metis to 1-2-0, so that the separator is 0, as later we will have more submaps
MetisResult result;
@ -22,7 +22,7 @@ namespace gtsam { namespace partition {
fg_(fg), ordering_(ordering){
GenericUnaryGraph unaryFactors;
GenericGraph gfg;
boost::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering);
std::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering);

// build reverse mapping from integer to symbol
int numNodes = ordering.size();
@ -46,7 +46,7 @@ namespace gtsam { namespace partition {
const NLG& fg, const Ordering& ordering, const std::shared_ptr<Cuts>& cuts, const bool verbose) : fg_(fg), ordering_(ordering){
GenericUnaryGraph unaryFactors;
GenericGraph gfg;
boost::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering);
std::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering);

// build reverse mapping from integer to symbol
int numNodes = ordering.size();
@ -74,7 +74,7 @@ TEST (AHRS, Mechanization_integrate) {
AHRS ahrs = AHRS(stationaryU,stationaryF,g_e);
Mechanization_bRn2 mech;
KalmanFilter::State state;
// boost::tie(mech,state) = ahrs.initialize(g_e);
// std::tie(mech,state) = ahrs.initialize(g_e);
// Vector u = Vector3(0.05,0.0,0.0);
// double dt = 2;
// Rot3 expected;
@ -235,7 +235,7 @@ TEST(ExpressionFactor, Shallow) {
// Get and check keys and dims
KeyVector keys;
FastVector<int> dims;
boost::tie(keys, dims) = expression.keysAndDims();
std::tie(keys, dims) = expression.keysAndDims();
LONGS_EQUAL(2,keys.size());
LONGS_EQUAL(2,dims.size());
LONGS_EQUAL(1,keys[0]);
@ -26,10 +26,6 @@

#include <CppUnitLite/TestHarness.h>

#include <boost/tuple/tuple.hpp>
#include <boost/range/adaptor/map.hpp>
namespace br { using namespace boost::range; using namespace boost::adaptors; }

#include <string.h>
#include <iostream>

@ -117,7 +113,7 @@ TEST(GaussianFactorGraph, eliminateOne_x1_fast) {
GaussianFactorGraph fg = createGaussianFactorGraph();
GaussianConditional::shared_ptr conditional;
JacobianFactor::shared_ptr remaining;
boost::tie(conditional, remaining) = EliminateQR(fg, Ordering{X(1)});
std::tie(conditional, remaining) = EliminateQR(fg, Ordering{X(1)});

// create expected Conditional Gaussian
Matrix I = 15 * I_2x2, R11 = I, S12 = -0.111111 * I, S13 = -0.444444 * I;
@ -295,7 +291,7 @@ TEST(GaussianFactorGraph, elimination) {
// Check matrix
Matrix R;
Vector d;
boost::tie(R, d) = bayesNet.matrix();
std::tie(R, d) = bayesNet.matrix();
Matrix expected =
(Matrix(2, 2) << 0.707107, -0.353553, 0.0, 0.612372).finished();
Matrix expected2 =
@ -450,7 +446,7 @@ TEST( GaussianFactorGraph, conditional_sigma_failure) {
GaussianBayesTree actBT = *lfg.eliminateMultifrontal();

// Check that all sigmas in an unconstrained bayes tree are set to one
for(const GaussianBayesTree::sharedClique& clique: actBT.nodes() | br::map_values) {
for (const auto& [key, clique]: actBT.nodes()) {
GaussianConditional::shared_ptr conditional = clique->conditional();
//size_t dim = conditional->rows();
//EXPECT(assert_equal(gtsam::Vector::Ones(dim), conditional->get_model()->sigmas(), tol));
@ -22,9 +22,6 @@
#include <gtsam/linear/GaussianISAM.h>
#include <gtsam/inference/Ordering.h>

#include <boost/range/adaptor/map.hpp>
namespace br { using namespace boost::adaptors; using namespace boost::range; }

using namespace std;
using namespace gtsam;
using namespace example;
@ -53,7 +50,7 @@ TEST( ISAM, iSAM_smoother )
GaussianBayesTree expected = *smoother.eliminateMultifrontal(ordering);

// Verify sigmas in the bayes tree
for(const GaussianBayesTree::sharedClique& clique: expected.nodes() | br::map_values) {
for (const auto& [key, clique] : expected.nodes()) {
GaussianConditional::shared_ptr conditional = clique->conditional();
EXPECT(!conditional->get_model());
}
@ -24,8 +24,6 @@

#include <CppUnitLite/TestHarness.h>

#include <boost/range/adaptor/map.hpp>
namespace br { using namespace boost::adaptors; using namespace boost::range; }

using namespace std;
using namespace gtsam;
@ -249,7 +247,7 @@ bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, c
// Check gradient at each node
bool nodeGradientsOk = true;
typedef ISAM2::sharedClique sharedClique;
for(const sharedClique& clique: isam.nodes() | br::map_values) {
for (const auto& [key, clique] : isam.nodes()) {
// Compute expected gradient
GaussianFactorGraph jfg;
jfg += clique->conditional();
@ -453,7 +451,7 @@ TEST(ISAM2, swapFactors)

// Check gradient at each node
typedef ISAM2::sharedClique sharedClique;
for(const sharedClique& clique: isam.nodes() | br::map_values) {
for (const auto& [key, clique]: isam.nodes()) {
// Compute expected gradient
GaussianFactorGraph jfg;
jfg += clique->conditional();
@ -578,7 +576,7 @@ TEST(ISAM2, constrained_ordering)

// Check gradient at each node
typedef ISAM2::sharedClique sharedClique;
for(const sharedClique& clique: isam.nodes() | br::map_values) {
for (const auto& [key, clique]: isam.nodes()) {
// Compute expected gradient
GaussianFactorGraph jfg;
jfg += clique->conditional();
@ -620,9 +618,11 @@ namespace {
bool checkMarginalizeLeaves(ISAM2& isam, const FastList<Key>& leafKeys) {
Matrix expectedAugmentedHessian, expected3AugmentedHessian;
KeyVector toKeep;
for(Key j: isam.getDelta() | br::map_keys)
if(find(leafKeys.begin(), leafKeys.end(), j) == leafKeys.end())
toKeep.push_back(j);
for (const auto& [key, clique]: isam.getDelta()) {
if(find(leafKeys.begin(), leafKeys.end(), key) == leafKeys.end()) {
toKeep.push_back(key);
}
}

// Calculate expected marginal from iSAM2 tree
expectedAugmentedHessian = GaussianFactorGraph(isam).marginal(toKeep, EliminateQR)->augmentedHessian();
@ -67,7 +67,7 @@ TEST( GaussianJunctionTreeB, constructor2 ) {
// create a graph
NonlinearFactorGraph nlfg;
Values values;
boost::tie(nlfg, values) = createNonlinearSmoother(7);
std::tie(nlfg, values) = createNonlinearSmoother(7);
SymbolicFactorGraph::shared_ptr symbolic = nlfg.symbolic();

// linearize

@ -130,7 +130,7 @@ TEST( GaussianJunctionTreeB, constructor2 ) {
// // create a graph
// GaussianFactorGraph fg;
// Ordering ordering;
// boost::tie(fg,ordering) = createSmoother(7);
// std::tie(fg,ordering) = createSmoother(7);
//
// // optimize the graph
// GaussianJunctionTree tree(fg);

@ -739,7 +739,7 @@ TEST(GncOptimizer, optimizeSmallPoseGraph) {
const string filename = findExampleDataFile("w100.graph");
NonlinearFactorGraph::shared_ptr graph;
Values::shared_ptr initial;
boost::tie(graph, initial) = load2D(filename);
std::tie(graph, initial) = load2D(filename);
// Add a Gaussian prior on first poses
Pose2 priorMean(0.0, 0.0, 0.0); // prior at origin
SharedDiagonal priorNoise = noiseModel::Diagonal::Sigmas(

@ -20,13 +20,12 @@

#include <CppUnitLite/TestHarness.h>

#include <boost/tuple/tuple.hpp>

using namespace std;
using namespace gtsam;

// Generate a small PoseSLAM problem
boost::tuple<NonlinearFactorGraph, Values> generateProblem() {
std::tuple<NonlinearFactorGraph, Values> generateProblem() {

// 1. Create graph container and add factors to it
NonlinearFactorGraph graph;

@ -64,7 +63,7 @@ boost::tuple<NonlinearFactorGraph, Values> generateProblem() {
Pose2 x5(2.1, 2.1, -M_PI_2);
initialEstimate.insert(5, x5);

return boost::tie(graph, initialEstimate);
return std::tie(graph, initialEstimate);
}

/* ************************************************************************* */

@ -73,7 +72,7 @@ TEST(NonlinearConjugateGradientOptimizer, Optimize) {
NonlinearFactorGraph graph;
Values initialEstimate;

boost::tie(graph, initialEstimate) = generateProblem();
std::tie(graph, initialEstimate) = generateProblem();
// cout << "initial error = " << graph.error(initialEstimate) << endl;

NonlinearOptimizerParams param;

@ -68,7 +68,7 @@ TEST( Graph, predecessorMap2Graph )
p_map.insert(1, 2);
p_map.insert(2, 2);
p_map.insert(3, 2);
boost::tie(graph, root, key2vertex) = predecessorMap2Graph<SGraph<Key>, SVertex, Key>(p_map);
std::tie(graph, root, key2vertex) = predecessorMap2Graph<SGraph<Key>, SVertex, Key>(p_map);

LONGS_EQUAL(3, (long)boost::num_vertices(graph));
CHECK(root == key2vertex[2]);

@ -174,7 +174,7 @@ TEST( GaussianFactorGraph, findMinimumSpanningTree )
// G.push_factor("x3", "x4");
//
// SymbolicFactorGraph T, C;
// boost::tie(T, C) = G.splitMinimumSpanningTree();
// std::tie(T, C) = G.splitMinimumSpanningTree();
//
// SymbolicFactorGraph expectedT, expectedC;
// expectedT.push_factor("x1", "x2");

@ -207,7 +207,7 @@ TEST( GaussianFactorGraph, findMinimumSpanningTree )
//
// SymbolicFactorGraph singletonGraph;
// set<Symbol> singletons;
// boost::tie(singletonGraph, singletons) = G.removeSingletons();
// std::tie(singletonGraph, singletons) = G.removeSingletons();
//
// set<Symbol> singletons_excepted; singletons_excepted += "x1", "x2", "x5", "l1", "l3";
// CHECK(singletons_excepted == singletons);

@ -62,7 +62,7 @@ TEST( Iterative, conjugateGradientDescent )
Matrix A;
Vector b;
Vector x0 = Z_6x1;
boost::tie(A, b) = fg.jacobian();
std::tie(A, b) = fg.jacobian();
Vector expectedX = (Vector(6) << -0.1, 0.1, -0.1, -0.1, 0.1, -0.2).finished();

// Do conjugate gradient descent, System version

@ -31,8 +31,6 @@

#include <CppUnitLite/TestHarness.h>

#include <boost/range/adaptor/map.hpp>
using boost::adaptors::map_values;

#include <iostream>
#include <fstream>

@ -302,7 +300,9 @@ TEST_UNSAFE(NonlinearOptimizer, MoreOptimization) {
GaussianFactorGraph::shared_ptr linear = optimizer.linearize();
VectorValues d = linear->hessianDiagonal();
VectorValues sqrtHessianDiagonal = d;
for (Vector& v : sqrtHessianDiagonal | map_values) v = v.cwiseSqrt();
for (auto& [key, value] : sqrtHessianDiagonal) {
value = value.cwiseSqrt();
}
GaussianFactorGraph damped = optimizer.buildDampedSystem(*linear, sqrtHessianDiagonal);
VectorValues expectedDiagonal = d + params.lambdaInitial * d;
EXPECT(assert_equal(expectedDiagonal, damped.hessianDiagonal()));

@ -29,9 +29,6 @@

#include <CppUnitLite/TestHarness.h>

#include <boost/range/adaptor/reversed.hpp>
#include <boost/tuple/tuple.hpp>

#include <fstream>

using namespace std;

@ -67,7 +64,7 @@ TEST(SubgraphPreconditioner, planarGraph) {
// Check planar graph construction
GaussianFactorGraph A;
VectorValues xtrue;
boost::tie(A, xtrue) = planarGraph(3);
std::tie(A, xtrue) = planarGraph(3);
LONGS_EQUAL(13, A.size());
LONGS_EQUAL(9, xtrue.size());
DOUBLES_EQUAL(0, error(A, xtrue), 1e-9); // check zero error for xtrue

@ -83,11 +80,11 @@ TEST(SubgraphPreconditioner, splitOffPlanarTree) {
// Build a planar graph
GaussianFactorGraph A;
VectorValues xtrue;
boost::tie(A, xtrue) = planarGraph(3);
std::tie(A, xtrue) = planarGraph(3);

// Get the spanning tree and constraints, and check their sizes
GaussianFactorGraph T, C;
boost::tie(T, C) = splitOffPlanarTree(3, A);
std::tie(T, C) = splitOffPlanarTree(3, A);
LONGS_EQUAL(9, T.size());
LONGS_EQUAL(4, C.size());

@ -103,11 +100,11 @@ TEST(SubgraphPreconditioner, system) {
GaussianFactorGraph Ab;
VectorValues xtrue;
size_t N = 3;
boost::tie(Ab, xtrue) = planarGraph(N); // A*x-b
std::tie(Ab, xtrue) = planarGraph(N); // A*x-b

// Get the spanning tree and remaining graph
GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2
boost::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab);
std::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab);

// Eliminate the spanning tree to build a prior
const Ordering ord = planarOrdering(N);

@ -199,11 +196,11 @@ TEST(SubgraphPreconditioner, conjugateGradients) {
GaussianFactorGraph Ab;
VectorValues xtrue;
size_t N = 3;
boost::tie(Ab, xtrue) = planarGraph(N); // A*x-b
std::tie(Ab, xtrue) = planarGraph(N); // A*x-b

// Get the spanning tree
GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2
boost::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab);
std::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab);

// Eliminate the spanning tree to build a prior
GaussianBayesNet Rc1 = *Ab1.eliminateSequential(); // R1*x-c1

@ -21,7 +21,6 @@
#include <iostream>
using namespace std;

#include <boost/tuple/tuple.hpp>

#include <gtsam/base/Matrix.h>
#include <gtsam/linear/JacobianFactor.h>

@ -106,7 +105,7 @@ int main()
JacobianFactor::shared_ptr factor;

for(int i = 0; i < n; i++)
boost::tie(conditional, factor) =
std::tie(conditional, factor) =
JacobianFactor(combined).eliminate(Ordering{_x2_});

long timeLog2 = clock();

@ -63,7 +63,7 @@ double timePlanarSmootherEliminate(int N, bool old = true) {
//double timePlanarSmootherJoinAug(int N, size_t reps) {
// GaussianFactorGraph fgBase;
// VectorValues config;
// boost::tie(fgBase,config) = planarGraph(N);
// std::tie(fgBase,config) = planarGraph(N);
// Ordering ordering = fgBase.getOrdering();
// Symbol key = ordering.front();
//

@ -96,7 +96,7 @@ double timePlanarSmootherEliminate(int N, bool old = true) {
//double timePlanarSmootherCombined(int N, size_t reps) {
// GaussianFactorGraph fgBase;
// VectorValues config;
// boost::tie(fgBase,config) = planarGraph(N);
// std::tie(fgBase,config) = planarGraph(N);
// Ordering ordering = fgBase.getOrdering();
// Symbol key = ordering.front();
//

@ -27,7 +27,6 @@
#include <boost/archive/binary_oarchive.hpp>
#include <boost/archive/binary_iarchive.hpp>
#include <boost/serialization/export.hpp>
#include <boost/range/adaptor/reversed.hpp>

using namespace std;
using namespace gtsam;

@ -225,9 +224,14 @@ int main(int argc, char *argv[]) {
try {
Marginals marginals(graph, values);
int i=0;
for (Key key1: boost::adaptors::reverse(values.keys())) {
// Assign the keyvector to a named variable
auto keys = values.keys();
// Iterate over it in reverse
for (auto it1 = keys.rbegin(); it1 != keys.rend(); ++it1) {
Key key1 = *it1;
int j=0;
for (Key key2: boost::adaptors::reverse(values.keys())) {
for (auto it2 = keys.rbegin(); it2 != keys.rend(); ++it2) {
Key key2 = *it2;
if(i != j) {
gttic_(jointMarginalInformation);
KeyVector keys(2);

@ -37,7 +37,7 @@ int main(int argc, char *argv[]) {
NonlinearFactorGraph::shared_ptr g;
string inputFile = findExampleDataFile("w10000");
auto model = noiseModel::Diagonal::Sigmas((Vector(3) << 0.05, 0.05, 5.0 * M_PI / 180.0).finished());
boost::tie(g, solution) = load2D(inputFile, model);
std::tie(g, solution) = load2D(inputFile, model);

// add noise to create initial estimate
Values initial;
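
Taken together, the hunks above apply two recurring migrations: boost::tie(a, b) = f(...) becomes std::tie(a, b) = f(...) wherever f now returns a std::pair or std::tuple, and loops over boost range adaptors (map_values, map_keys, reversed) become C++17 structured bindings or plain reverse iterators over the container itself. Below is a minimal, self-contained sketch of the first two patterns; makeGraph and the std::map are hypothetical stand-ins for illustration only, not GTSAM API.

#include <iostream>
#include <map>
#include <string>
#include <tuple>
#include <vector>

// Hypothetical stand-in for factory functions such as readG2o/load2D that now return std::pair.
std::pair<std::vector<int>, std::string> makeGraph() {
  return {{1, 2, 3}, "initial"};
}

int main() {
  // Pattern 1: boost::tie(...) = f(...)  ->  std::tie(...) = f(...)
  std::vector<int> graph;
  std::string initial;
  std::tie(graph, initial) = makeGraph();

  // Pattern 2: for (T& v : m | boost::adaptors::map_values)  ->  structured bindings
  std::map<int, double> delta{{0, 1.0}, {1, 4.0}};
  for (auto& [key, value] : delta) {
    value = value * 0.5;  // mutate the mapped value in place, like the cwiseSqrt() loop above
    std::cout << key << " -> " << value << "\n";
  }

  std::cout << graph.size() << " " << initial << "\n";
  return 0;
}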