Merge pull request #1437 from borglab/feature/pairs

release/4.3a0
Frank Dellaert 2023-02-04 21:03:21 -08:00 committed by GitHub
commit 130edcd1ec
96 changed files with 306 additions and 611 deletions

View File

@ -87,9 +87,8 @@ int main(int argc, char* argv[]) {
result.print();
cout << "Detailed results:" << endl;
for (auto keyedStatus : result.detail->variableStatus) {
const auto& status = keyedStatus.second;
PrintKey(keyedStatus.first);
for (auto& [key, status] : result.detail->variableStatus) {
PrintKey(key);
cout << " {" << endl;
cout << "reeliminated: " << status.isReeliminated << endl;
cout << "relinearized above thresh: " << status.isAboveRelinThreshold

View File

@ -74,8 +74,8 @@ int main(const int argc, const char* argv[]) {
// Calculate and print marginal covariances for all variables
Marginals marginals(*graph, result);
for (const auto& key_pose : result.extract<Pose3>()) {
std::cout << marginals.marginalCovariance(key_pose.first) << endl;
for (const auto& [key, pose] : result.extract<Pose3>()) {
std::cout << marginals.marginalCovariance(key) << endl;
}
return 0;
}

View File

@ -79,9 +79,7 @@ int main(int argc, char* argv[]) {
for (const SfmTrack& track : mydata.tracks) {
// Leaf expression for j^th point
Point3_ point_('p', j);
for (const SfmMeasurement& m : track.measurements) {
size_t i = m.first;
Point2 uv = m.second;
for (const auto& [i, uv] : track.measurements) {
// Leaf expression for i^th camera
Expression<SfmCamera> camera_(C(i));
// Below an expression for the prediction of the measurement:

View File

@ -57,9 +57,7 @@ int main (int argc, char* argv[]) {
// Add measurements to the factor graph
size_t j = 0;
for(const SfmTrack& track: mydata.tracks) {
for(const SfmMeasurement& m: track.measurements) {
size_t i = m.first;
Point2 uv = m.second;
for (const auto& [i, uv] : track.measurements) {
graph.emplace_shared<MyFactor>(uv, noise, C(i), P(j)); // note use of shorthand symbols C and P
}
j += 1;

View File

@ -59,9 +59,7 @@ int main(int argc, char* argv[]) {
// Add measurements to the factor graph
size_t j = 0;
for (const SfmTrack& track : mydata.tracks) {
for (const SfmMeasurement& m : track.measurements) {
size_t i = m.first;
Point2 uv = m.second;
for (const auto& [i, uv] : track.measurements) {
graph.emplace_shared<MyFactor>(
uv, noise, C(i), P(j)); // note use of shorthand symbols C and P
}

View File

@ -557,12 +557,12 @@ void runPerturb()
// Perturb values
VectorValues noise;
for(const auto& key_dim: initial.dims())
for(const auto& [key, dim]: initial.dims())
{
Vector noisev(key_dim.second);
Vector noisev(dim);
for(Vector::Index i = 0; i < noisev.size(); ++i)
noisev(i) = normal(rng);
noise.insert(key_dim.first, noisev);
noise.insert(key, noisev);
}
Values perturbed = initial.retract(noise);

View File

@ -50,7 +50,7 @@ class DSFMap {
iterator it = entries_.find(key);
// if key does not exist, create and return itself
if (it == entries_.end()) {
it = entries_.insert(std::make_pair(key, empty)).first;
it = entries_.insert({key, empty}).first;
it->second.parent_ = it;
it->second.rank_ = 0;
}
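
The braced argument selects the same `insert(value_type&&)` overload as the `std::make_pair` form and still returns an `{iterator, bool}` pair. A small self-contained sketch of that return value, using made-up key and value types:

#include <map>
#include <string>

int main() {
  std::map<int, std::string> entries;
  // insert() returns {iterator, bool}; the bool reports whether insertion happened.
  auto [it, inserted] = entries.insert({7, "seven"});
  if (!inserted) it->second = "seven";  // key already present: overwrite instead
  return inserted ? 0 : 1;
}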

View File

@ -63,7 +63,7 @@ public:
}
/** Handy 'insert' function for Matlab wrapper */
bool insert2(const KEY& key, const VALUE& val) { return Base::insert(std::make_pair(key, val)).second; }
bool insert2(const KEY& key, const VALUE& val) { return Base::insert({key, val}).second; }
/** Handy 'exists' function */
bool exists(const KEY& e) const { return this->find(e) != this->end(); }

View File

@ -249,8 +249,7 @@ pair<Matrix,Matrix> qr(const Matrix& A) {
xjm(k) = R(j+k, j);
// calculate the Householder vector v
double beta; Vector vjm;
std::tie(beta,vjm) = house(xjm);
const auto [beta, vjm] = house(xjm);
// pad with zeros to get m-dimensional vector v
for(size_t k = 0 ; k < m; k++)
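
Replacing `std::tie` with a structured binding also drops the separate `double beta; Vector vjm;` declarations, since the binding introduces the names itself. A sketch contrasting the two styles, with `house_stub` standing in for the real `house()`:

#include <tuple>
#include <utility>

// Stand-in for house(): returns a coefficient and an index, not the real GTSAM signature.
std::pair<double, int> house_stub() { return {2.0, 3}; }

int main() {
  // Pre-C++17: variables must be declared (and default-constructible) before unpacking.
  double beta_old; int v_old;
  std::tie(beta_old, v_old) = house_stub();

  // C++17: declare and initialize in one statement.
  const auto [beta, v] = house_stub();
  return (beta == beta_old && v == v_old) ? 0 : 1;
}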

View File

@ -69,7 +69,7 @@ class WeightedSampler {
static const double kexp1 = std::exp(1.0);
for (auto it = weights.begin(); it != weights.begin() + numSamples; ++it) {
const double k_i = kexp1 / *it;
reservoir.push(std::make_pair(k_i, it - weights.begin() + 1));
reservoir.push({k_i, it - weights.begin() + 1});
}
// Step 4: Repeat Steps 5–10 until the population is exhausted
@ -110,7 +110,7 @@ class WeightedSampler {
// Step 8: The item in reservoir with the minimum key is replaced by
// item v_i
reservoir.pop();
reservoir.push(std::make_pair(k_i, it - weights.begin() + 1));
reservoir.push({k_i, it - weights.begin() + 1});
}
}

View File

@ -857,8 +857,7 @@ TEST(Matrix, qr )
Matrix expectedR = (Matrix(6, 4) << 15, 0, -8.3333, 0, 00, 11.1803, 0, -2.2361, 00, 0,
7.4536, 0, 00, 0, 0, 10.9545, 00, 0, 0, 0, 00, 0, 0, 0).finished();
Matrix Q, R;
std::tie(Q, R) = qr(A);
const auto [Q, R] = qr(A);
EXPECT(assert_equal(expectedQ, Q, 1e-4));
EXPECT(assert_equal(expectedR, R, 1e-4));
EXPECT(assert_equal(A, Q*R, 1e-14));
@ -915,10 +914,7 @@ TEST(Matrix, weighted_elimination )
// unpack and verify
size_t i = 0;
for (const auto& tuple : solution) {
Vector r;
double di, sigma;
std::tie(r, di, sigma) = tuple;
for (const auto& [r, di, sigma] : solution) {
EXPECT(assert_equal(r, expectedR.row(i))); // verify r
DOUBLES_EQUAL(d(i), di, 1e-8); // verify d
DOUBLES_EQUAL(newSigmas(i), sigma, 1e-5); // verify sigma
@ -1142,10 +1138,7 @@ TEST(Matrix, DLT )
1.89, 2.24, 3.99, 3.24, 3.84, 6.84, 18.09, 21.44, 38.19,
2.24, 2.48, 6.24, 3.08, 3.41, 8.58, 24.64, 27.28, 68.64
).finished();
int rank;
double error;
Vector actual;
std::tie(rank,error,actual) = DLT(A);
const auto [rank,error,actual] = DLT(A);
Vector expected = (Vector(9) << -0.0, 0.2357, 0.4714, -0.2357, 0.0, - 0.4714,-0.4714, 0.4714, 0.0).finished();
EXPECT_LONGS_EQUAL(8,rank);
EXPECT_DOUBLES_EQUAL(0,error,1e-8);
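
`DLT` returns a three-element `std::tuple`, and the structured binding unpacks all of it in one declaration. A self-contained sketch with `dlt_stub` and plain standard-library types standing in for GTSAM's `Vector`:

#include <tuple>
#include <vector>

// Stand-in for DLT(): returns (rank, error, solution vector), mirroring the GTSAM shape.
std::tuple<int, double, std::vector<double>> dlt_stub() {
  return {8, 0.0, std::vector<double>{0.0, 0.2357, 0.4714}};
}

int main() {
  const auto [rank, error, v] = dlt_stub();
  return (rank == 8 && error == 0.0 && v.size() == 3) ? 0 : 1;
}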

View File

@ -154,8 +154,7 @@ TEST(Vector, weightedPseudoinverse )
Vector weights = sigmas.array().square().inverse();
// perform solve
Vector actual; double precision;
std::tie(actual, precision) = weightedPseudoinverse(x, weights);
const auto [actual, precision] = weightedPseudoinverse(x, weights);
// construct expected
Vector expected(2);
@ -179,8 +178,7 @@ TEST(Vector, weightedPseudoinverse_constraint )
sigmas(0) = 0.0; sigmas(1) = 0.2;
Vector weights = sigmas.array().square().inverse();
// perform solve
Vector actual; double precision;
std::tie(actual, precision) = weightedPseudoinverse(x, weights);
const auto [actual, precision] = weightedPseudoinverse(x, weights);
// construct expected
Vector expected(2);
@ -197,8 +195,7 @@ TEST(Vector, weightedPseudoinverse_nan )
Vector a = (Vector(4) << 1., 0., 0., 0.).finished();
Vector sigmas = (Vector(4) << 0.1, 0.1, 0., 0.).finished();
Vector weights = sigmas.array().square().inverse();
Vector pseudo; double precision;
std::tie(pseudo, precision) = weightedPseudoinverse(a, weights);
const auto [pseudo, precision] = weightedPseudoinverse(a, weights);
Vector expected = (Vector(4) << 1., 0., 0.,0.).finished();
EXPECT(assert_equal(expected, pseudo));

View File

@ -222,10 +222,9 @@ size_t getTicTocID(const char *descriptionC) {
static gtsam::FastMap<std::string, size_t> idMap;
// Retrieve or add this string
gtsam::FastMap<std::string, size_t>::const_iterator it = idMap.find(
description);
auto it = idMap.find(description);
if (it == idMap.end()) {
it = idMap.insert(std::make_pair(description, nextId)).first;
it = idMap.insert({description, nextId}).first;
++nextId;
}

View File

@ -424,10 +424,10 @@ namespace gtsam {
template <typename L, typename T1, typename T2>
std::pair<DecisionTree<L, T1>, DecisionTree<L, T2> > unzip(
const DecisionTree<L, std::pair<T1, T2> >& input) {
return std::make_pair(
return {
DecisionTree<L, T1>(input, [](std::pair<T1, T2> i) { return i.first; }),
DecisionTree<L, T2>(input,
[](std::pair<T1, T2> i) { return i.second; }));
DecisionTree<L, T2>(input, [](std::pair<T1, T2> i) { return i.second; })
};
}
} // namespace gtsam

View File

@ -140,8 +140,7 @@ namespace gtsam {
orderedKeys, product);
gttoc(lookup);
return std::make_pair(
std::dynamic_pointer_cast<DiscreteConditional>(lookup), max);
return {std::dynamic_pointer_cast<DiscreteConditional>(lookup), max};
}
/* ************************************************************************ */
@ -223,7 +222,7 @@ namespace gtsam {
std::make_shared<DiscreteConditional>(product, *sum, orderedKeys);
gttoc(divide);
return std::make_pair(conditional, sum);
return {conditional, sum};
}
/* ************************************************************************ */

View File

@ -458,9 +458,7 @@ TEST(DecisionTree, unzip) {
DTP tree(B, DTP(A, {0, "zero"}, {1, "one"}),
DTP(A, {2, "two"}, {1337, "l33t"}));
DT1 dt1;
DT2 dt2;
std::tie(dt1, dt2) = unzip(tree);
const auto [dt1, dt2] = unzip(tree);
DT1 tree1(B, DT1(A, 0, 1), DT1(A, 2, 1337));
DT2 tree2(B, DT2(A, "zero", "one"), DT2(A, "two", "l33t"));

View File

@ -109,9 +109,7 @@ TEST(DiscreteFactorGraph, test) {
// Test EliminateDiscrete
Ordering frontalKeys;
frontalKeys += Key(0);
DiscreteConditional::shared_ptr conditional;
DecisionTreeFactor::shared_ptr newFactor;
std::tie(conditional, newFactor) = EliminateDiscrete(graph, frontalKeys);
const auto [conditional, newFactor] = EliminateDiscrete(graph, frontalKeys);
// Check Conditional
CHECK(conditional);
@ -128,9 +126,7 @@ TEST(DiscreteFactorGraph, test) {
Ordering ordering;
ordering += Key(0), Key(1), Key(2);
DiscreteEliminationTree etree(graph, ordering);
DiscreteBayesNet::shared_ptr actual;
DiscreteFactorGraph::shared_ptr remainingGraph;
std::tie(actual, remainingGraph) = etree.eliminate(&EliminateDiscrete);
const auto [actual, remainingGraph] = etree.eliminate(&EliminateDiscrete);
// Check Bayes net
DiscreteBayesNet expectedBayesNet;

View File

@ -223,7 +223,7 @@ public:
* @return a pair of [start, end] indices into the tangent space vector
*/
inline static std::pair<size_t, size_t> translationInterval() {
return std::make_pair(3, 5);
return {3, 5};
}
/// @}
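
`return {3, 5};` list-initializes the declared `std::pair<size_t, size_t>` return type directly, whereas `std::make_pair(3, 5)` first deduces a `std::pair<int, int>` and then converts it. A tiny sketch of the idiom (the function name is illustrative only):

#include <cstddef>
#include <utility>

// Sketch only: mirrors the translationInterval() shape, not the actual GTSAM class.
std::pair<std::size_t, std::size_t> translation_interval_sketch() {
  return {3, 5};  // list-initializes pair<size_t, size_t> directly
}

int main() {
  const auto [start, end] = translation_interval_sketch();
  return (start == 3 && end == 5) ? 0 : 1;
}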

View File

@ -396,7 +396,7 @@ class CameraSet : public std::vector<CAMERA, Eigen::aligned_allocator<CAMERA>> {
FastMap<Key, size_t> KeySlotMap;
for (size_t slot = 0; slot < allKeys.size(); slot++)
KeySlotMap.insert(std::make_pair(allKeys[slot], slot));
KeySlotMap.emplace(allKeys[slot], slot);
// Schur complement trick
// G = F' * F - F' * E * P * E' * F
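
`emplace` forwards its two arguments to the pair constructor instead of building a temporary pair first; like `insert`, it leaves an existing key untouched. A brief sketch with a plain `std::map` standing in for `FastMap<Key, size_t>`:

#include <cstddef>
#include <map>

int main() {
  std::map<std::size_t, std::size_t> keySlotMap;  // plain std::map standing in for FastMap
  for (std::size_t slot = 0; slot < 4; ++slot)
    keySlotMap.emplace(slot * 10, slot);  // constructs the pair in place, no temporary
  return keySlotMap.size() == 4 ? 0 : 1;
}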

View File

@ -309,14 +309,14 @@ public:
* exponential map parameterization
* @return a pair of [start, end] indices into the tangent space vector
*/
inline static std::pair<size_t, size_t> translationInterval() { return std::make_pair(0, 1); }
inline static std::pair<size_t, size_t> translationInterval() { return {0, 1}; }
/**
* Return the start and end indices (inclusive) of the rotation component of the
* exponential map parameterization
* @return a pair of [start, end] indices into the tangent space vector
*/
static std::pair<size_t, size_t> rotationInterval() { return std::make_pair(2, 2); }
static std::pair<size_t, size_t> rotationInterval() { return {2, 2}; }
/// Output stream operator
GTSAM_EXPORT

View File

@ -364,7 +364,7 @@ public:
* @return a pair of [start, end] indices into the tangent space vector
*/
inline static std::pair<size_t, size_t> translationInterval() {
return std::make_pair(3, 5);
return {3, 5};
}
/**
@ -373,7 +373,7 @@ public:
* @return a pair of [start, end] indices into the tangent space vector
*/
static std::pair<size_t, size_t> rotationInterval() {
return std::make_pair(0, 2);
return {0, 2};
}
/**

View File

@ -31,9 +31,9 @@ namespace internal {
static Point2Pairs SubtractCentroids(const Point2Pairs& abPointPairs,
const Point2Pair& centroids) {
Point2Pairs d_abPointPairs;
for (const Point2Pair& abPair : abPointPairs) {
Point2 da = abPair.first - centroids.first;
Point2 db = abPair.second - centroids.second;
for (const auto& [a, b] : abPointPairs) {
Point2 da = a - centroids.first;
Point2 db = b - centroids.second;
d_abPointPairs.emplace_back(da, db);
}
return d_abPointPairs;
@ -43,10 +43,8 @@ static Point2Pairs SubtractCentroids(const Point2Pairs& abPointPairs,
static double CalculateScale(const Point2Pairs& d_abPointPairs,
const Rot2& aRb) {
double x = 0, y = 0;
Point2 da, db;
for (const Point2Pair& d_abPair : d_abPointPairs) {
std::tie(da, db) = d_abPair;
for (const auto& [da, db] : d_abPointPairs) {
const Vector2 da_prime = aRb * db;
y += da.transpose() * da_prime;
x += da_prime.transpose() * da_prime;
@ -58,8 +56,8 @@ static double CalculateScale(const Point2Pairs& d_abPointPairs,
/// Form outer product H.
static Matrix2 CalculateH(const Point2Pairs& d_abPointPairs) {
Matrix2 H = Z_2x2;
for (const Point2Pair& d_abPair : d_abPointPairs) {
H += d_abPair.first * d_abPair.second.transpose();
for (const auto& [da, db] : d_abPointPairs) {
H += da * db.transpose();
}
return H;
}
@ -186,9 +184,7 @@ Similarity2 Similarity2::Align(const Pose2Pairs& abPosePairs) {
abPointPairs.reserve(n);
// Below denotes the pose of the i'th object/camera/etc
// in frame "a" or frame "b".
Pose2 aTi, bTi;
for (const Pose2Pair& abPair : abPosePairs) {
std::tie(aTi, bTi) = abPair;
for (const auto& [aTi, bTi] : abPosePairs) {
const Rot2 aRb = aTi.rotation().compose(bTi.rotation().inverse());
rotations.emplace_back(aRb);
abPointPairs.emplace_back(aTi.translation(), bTi.translation());
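
The same structured binding works over any range of pairs, such as the `Point2Pairs` and `Pose2Pairs` containers above. A standalone sketch with `std::pair<double, double>` standing in for the point types:

#include <utility>
#include <vector>

int main() {
  // std::pair<double, double> stands in for the Point2Pair type.
  std::vector<std::pair<double, double>> pairs{{1.0, 2.0}, {3.0, 4.0}};
  double sum = 0.0;
  for (const auto& [a, b] : pairs)  // no .first/.second and no std::tie into pre-declared locals
    sum += a + b;
  return sum == 10.0 ? 0 : 1;
}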

View File

@ -31,9 +31,9 @@ namespace internal {
static Point3Pairs subtractCentroids(const Point3Pairs &abPointPairs,
const Point3Pair &centroids) {
Point3Pairs d_abPointPairs;
for (const Point3Pair& abPair : abPointPairs) {
Point3 da = abPair.first - centroids.first;
Point3 db = abPair.second - centroids.second;
for (const auto& [a, b] : abPointPairs) {
Point3 da = a - centroids.first;
Point3 db = b - centroids.second;
d_abPointPairs.emplace_back(da, db);
}
return d_abPointPairs;
@ -46,8 +46,7 @@ static double calculateScale(const Point3Pairs &d_abPointPairs,
const Rot3 &aRb) {
double x = 0, y = 0;
Point3 da, db;
for (const Point3Pair& d_abPair : d_abPointPairs) {
std::tie(da, db) = d_abPair;
for (const auto& [da, db] : d_abPointPairs) {
const Vector3 da_prime = aRb * db;
y += da.transpose() * da_prime;
x += da_prime.transpose() * da_prime;
@ -59,8 +58,8 @@ static double calculateScale(const Point3Pairs &d_abPointPairs,
/// Form outer product H.
static Matrix3 calculateH(const Point3Pairs &d_abPointPairs) {
Matrix3 H = Z_3x3;
for (const Point3Pair& d_abPair : d_abPointPairs) {
H += d_abPair.first * d_abPair.second.transpose();
for (const auto& [da, db] : d_abPointPairs) {
H += da * db.transpose();
}
return H;
}
@ -184,8 +183,7 @@ Similarity3 Similarity3::Align(const vector<Pose3Pair> &abPosePairs) {
abPointPairs.reserve(n);
// Below denotes the pose of the i'th object/camera/etc in frame "a" or frame "b"
Pose3 aTi, bTi;
for (const Pose3Pair &abPair : abPosePairs) {
std::tie(aTi, bTi) = abPair;
for (const auto &[aTi, bTi] : abPosePairs) {
const Rot3 aRb = aTi.rotation().compose(bTi.rotation().inverse());
rotations.emplace_back(aRb);
abPointPairs.emplace_back(aTi.translation(), bTi.translation());

View File

@ -176,7 +176,7 @@ TEST(Point3, mean) {
TEST(Point3, mean_pair) {
Point3 a_mean(2, 2, 2), b_mean(-1, 1, 0);
Point3Pair expected = std::make_pair(a_mean, b_mean);
Point3Pair expected = {a_mean, b_mean};
Point3 a1(0, 0, 0), a2(1, 2, 3), a3(5, 4, 3);
Point3 b1(-1, 0, 0), b2(-2, 4, 0), b3(0, -1, 0);
std::vector<Point3Pair> point_pairs{{a1, b1}, {a2, b2}, {a3, b3}};

View File

@ -128,10 +128,8 @@ TEST( Rot3, AxisAngle2)
// constructor from a rotation matrix, as doubles in *row-major* order.
Rot3 R1(-0.999957, 0.00922903, 0.00203116, 0.00926964, 0.999739, 0.0208927, -0.0018374, 0.0209105, -0.999781);
Unit3 actualAxis;
double actualAngle;
// convert Rot3 to quaternion using GTSAM
std::tie(actualAxis, actualAngle) = R1.axisAngle();
const auto [actualAxis, actualAngle] = R1.axisAngle();
double expectedAngle = 3.1396582;
CHECK(assert_equal(expectedAngle, actualAngle, 1e-5));
@ -508,11 +506,9 @@ TEST( Rot3, yaw_pitch_roll )
TEST( Rot3, RQ)
{
// Try RQ on a pure rotation
Matrix actualK;
Vector actual;
std::tie(actualK, actual) = RQ(R.matrix());
const auto [actualK, actual] = RQ(R.matrix());
Vector expected = Vector3(0.14715, 0.385821, 0.231671);
CHECK(assert_equal(I_3x3,actualK));
CHECK(assert_equal(I_3x3, (Matrix)actualK));
CHECK(assert_equal(expected,actual,1e-6));
// Try using xyz call, asserting that Rot3::RzRyRx(x,y,z).xyz()==[x;y;z]
@ -531,9 +527,9 @@ TEST( Rot3, RQ)
// Try RQ to recover calibration from 3*3 sub-block of projection matrix
Matrix K = (Matrix(3, 3) << 500.0, 0.0, 320.0, 0.0, 500.0, 240.0, 0.0, 0.0, 1.0).finished();
Matrix A = K * R.matrix();
std::tie(actualK, actual) = RQ(A);
CHECK(assert_equal(K,actualK));
CHECK(assert_equal(expected,actual,1e-6));
const auto [actualK2, actual2] = RQ(A);
CHECK(assert_equal(K, actualK2));
CHECK(assert_equal(expected, actual2, 1e-6));
}
/* ************************************************************************* */

View File

@ -48,10 +48,7 @@ Vector4 triangulateHomogeneousDLT(
A.row(row) = p.x() * projection.row(2) - projection.row(0);
A.row(row + 1) = p.y() * projection.row(2) - projection.row(1);
}
int rank;
double error;
Vector v;
std::tie(rank, error, v) = DLT(A, rank_tol);
const auto [rank, error, v] = DLT(A, rank_tol);
if (rank < 3) throw(TriangulationUnderconstrainedException());
@ -79,10 +76,7 @@ Vector4 triangulateHomogeneousDLT(
A.row(row) = p.x() * projection.row(2) - p.z() * projection.row(0);
A.row(row + 1) = p.y() * projection.row(2) - p.z() * projection.row(1);
}
int rank;
double error;
Vector v;
std::tie(rank, error, v) = DLT(A, rank_tol);
const auto [rank, error, v] = DLT(A, rank_tol);
if (rank < 3) throw(TriangulationUnderconstrainedException());

View File

@ -137,7 +137,7 @@ std::pair<NonlinearFactorGraph, Values> triangulationGraph(
graph.emplace_shared<TriangulationFactor<Camera> > //
(camera_i, measurements[i], model, landmarkKey);
}
return std::make_pair(graph, values);
return {graph, values};
}
/**
@ -165,7 +165,7 @@ std::pair<NonlinearFactorGraph, Values> triangulationGraph(
graph.emplace_shared<TriangulationFactor<CAMERA> > //
(camera_i, measurements[i], model? model : unit, landmarkKey);
}
return std::make_pair(graph, values);
return {graph, values};
}
/**
@ -193,9 +193,7 @@ Point3 triangulateNonlinear(const std::vector<Pose3>& poses,
const SharedNoiseModel& model = nullptr) {
// Create a factor graph and initial values
Values values;
NonlinearFactorGraph graph;
std::tie(graph, values) = triangulationGraph<CALIBRATION> //
const auto [graph, values] = triangulationGraph<CALIBRATION> //
(poses, sharedCal, measurements, Symbol('p', 0), initialEstimate, model);
return optimize(graph, values, Symbol('p', 0));
@ -215,9 +213,7 @@ Point3 triangulateNonlinear(
const SharedNoiseModel& model = nullptr) {
// Create a factor graph and initial values
Values values;
NonlinearFactorGraph graph;
std::tie(graph, values) = triangulationGraph<CAMERA> //
const auto [graph, values] = triangulationGraph<CAMERA> //
(cameras, measurements, Symbol('p', 0), initialEstimate, model);
return optimize(graph, values, Symbol('p', 0));

View File

@ -251,9 +251,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
#endif
// Separate out decision tree into conditionals and remaining factors.
GaussianMixture::Conditionals conditionals;
GaussianMixtureFactor::Factors newFactors;
std::tie(conditionals, newFactors) = unzip(eliminationResults);
const auto [conditionals, newFactors] = unzip(eliminationResults);
// Create the GaussianMixture from the conditionals
auto gaussianMixture = std::make_shared<GaussianMixture>(

View File

@ -100,9 +100,7 @@ struct HybridConstructorTraversalData {
Ordering keyAsOrdering;
keyAsOrdering.push_back(node->key);
SymbolicConditional::shared_ptr conditional;
SymbolicFactor::shared_ptr separatorFactor;
std::tie(conditional, separatorFactor) =
const auto [conditional, separatorFactor] =
internal::EliminateSymbolic(symbolicFactors, keyAsOrdering);
// Store symbolic elimination results in the parent

View File

@ -131,9 +131,7 @@ TEST(HybridBayesNet, Choose) {
const Ordering ordering(s.linearizationPoint.keys());
HybridBayesNet::shared_ptr hybridBayesNet;
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
std::tie(hybridBayesNet, remainingFactorGraph) =
const auto [hybridBayesNet, remainingFactorGraph] =
s.linearizedFactorGraph.eliminatePartialSequential(ordering);
DiscreteValues assignment;
@ -162,9 +160,7 @@ TEST(HybridBayesNet, OptimizeAssignment) {
const Ordering ordering(s.linearizationPoint.keys());
HybridBayesNet::shared_ptr hybridBayesNet;
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
std::tie(hybridBayesNet, remainingFactorGraph) =
const auto [hybridBayesNet, remainingFactorGraph] =
s.linearizedFactorGraph.eliminatePartialSequential(ordering);
DiscreteValues assignment;

View File

@ -99,9 +99,7 @@ TEST(HybridBayesTree, OptimizeAssignment) {
Ordering ordering;
for (size_t k = 0; k < s.K; k++) ordering += X(k);
HybridBayesNet::shared_ptr hybridBayesNet;
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
std::tie(hybridBayesNet, remainingFactorGraph) =
const auto [hybridBayesNet, remainingFactorGraph] =
s.linearizedFactorGraph.eliminatePartialSequential(ordering);
GaussianBayesNet gbn = hybridBayesNet->choose(assignment);
@ -143,9 +141,7 @@ TEST(HybridBayesTree, Optimize) {
Ordering ordering;
for (size_t k = 0; k < s.K; k++) ordering += X(k);
HybridBayesNet::shared_ptr hybridBayesNet;
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
std::tie(hybridBayesNet, remainingFactorGraph) =
const auto [hybridBayesNet, remainingFactorGraph] =
s.linearizedFactorGraph.eliminatePartialSequential(ordering);
DiscreteFactorGraph dfg;

View File

@ -239,10 +239,8 @@ std::vector<size_t> getDiscreteSequence(size_t x) {
*/
AlgebraicDecisionTree<Key> getProbPrimeTree(
const HybridGaussianFactorGraph& graph) {
HybridBayesNet::shared_ptr bayesNet;
HybridGaussianFactorGraph::shared_ptr remainingGraph;
Ordering continuous(graph.continuousKeySet());
std::tie(bayesNet, remainingGraph) =
const auto [bayesNet, remainingGraph] =
graph.eliminatePartialSequential(continuous);
auto last_conditional = bayesNet->at(bayesNet->size() - 1);
@ -297,9 +295,7 @@ TEST(HybridEstimation, Probability) {
// Continuous elimination
Ordering continuous_ordering(graph.continuousKeySet());
HybridBayesNet::shared_ptr bayesNet;
HybridGaussianFactorGraph::shared_ptr discreteGraph;
std::tie(bayesNet, discreteGraph) =
auto [bayesNet, discreteGraph] =
graph.eliminatePartialSequential(continuous_ordering);
// Discrete elimination
@ -347,9 +343,7 @@ TEST(HybridEstimation, ProbabilityMultifrontal) {
// Eliminate continuous
Ordering continuous_ordering(graph.continuousKeySet());
HybridBayesTree::shared_ptr bayesTree;
HybridGaussianFactorGraph::shared_ptr discreteGraph;
std::tie(bayesTree, discreteGraph) =
const auto [bayesTree, discreteGraph] =
graph.eliminatePartialMultifrontal(continuous_ordering);
// Get the last continuous conditional which will have all the discrete keys

View File

@ -270,9 +270,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
auto ordering_full =
Ordering::ColamdConstrainedLast(hfg, {M(0), M(1), M(2), M(3)});
HybridBayesTree::shared_ptr hbt;
HybridGaussianFactorGraph::shared_ptr remaining;
std::tie(hbt, remaining) = hfg.eliminatePartialMultifrontal(ordering_full);
const auto [hbt, remaining] = hfg.eliminatePartialMultifrontal(ordering_full);
// 9 cliques in the bayes tree and 0 remaining variables to eliminate.
EXPECT_LONGS_EQUAL(9, hbt->size());
@ -327,10 +325,9 @@ TEST(HybridGaussianFactorGraph, Switching) {
std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX),
[](int x) { return X(x); });
KeyVector ndX;
std::vector<int> lvls;
std::tie(ndX, lvls) = makeBinaryOrdering(ordX);
auto [ndX, lvls] = makeBinaryOrdering(ordX);
std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering));
// TODO(dellaert): this has no effect!
for (auto &l : lvls) {
l = -l;
}
@ -341,11 +338,9 @@ TEST(HybridGaussianFactorGraph, Switching) {
std::vector<Key> ordC;
std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC),
[](int x) { return M(x); });
KeyVector ndC;
std::vector<int> lvls;
// std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering));
std::tie(ndC, lvls) = makeBinaryOrdering(ordC);
const auto [ndC, lvls] = makeBinaryOrdering(ordC);
std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering));
}
auto ordering_full = Ordering(ordering);
@ -353,9 +348,7 @@ TEST(HybridGaussianFactorGraph, Switching) {
// GTSAM_PRINT(*hfg);
// GTSAM_PRINT(ordering_full);
HybridBayesTree::shared_ptr hbt;
HybridGaussianFactorGraph::shared_ptr remaining;
std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(ordering_full);
const auto [hbt, remaining] = hfg->eliminatePartialMultifrontal(ordering_full);
// 12 cliques in the bayes tree and 0 remaining variables to eliminate.
EXPECT_LONGS_EQUAL(12, hbt->size());
@ -388,10 +381,9 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) {
std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX),
[](int x) { return X(x); });
KeyVector ndX;
std::vector<int> lvls;
std::tie(ndX, lvls) = makeBinaryOrdering(ordX);
auto [ndX, lvls] = makeBinaryOrdering(ordX);
std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering));
// TODO(dellaert): this has no effect!
for (auto &l : lvls) {
l = -l;
}
@ -402,18 +394,14 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) {
std::vector<Key> ordC;
std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC),
[](int x) { return M(x); });
KeyVector ndC;
std::vector<int> lvls;
// std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering));
std::tie(ndC, lvls) = makeBinaryOrdering(ordC);
const auto [ndC, lvls] = makeBinaryOrdering(ordC);
std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering));
}
auto ordering_full = Ordering(ordering);
HybridBayesTree::shared_ptr hbt;
HybridGaussianFactorGraph::shared_ptr remaining;
std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(ordering_full);
const auto [hbt, remaining] = hfg->eliminatePartialMultifrontal(ordering_full);
auto new_fg = makeSwitchingChain(12);
auto isam = HybridGaussianISAM(*hbt);
@ -482,9 +470,7 @@ TEST(HybridGaussianFactorGraph, SwitchingTwoVar) {
ordering_partial.emplace_back(X(i));
ordering_partial.emplace_back(Y(i));
}
HybridBayesNet::shared_ptr hbn;
HybridGaussianFactorGraph::shared_ptr remaining;
std::tie(hbn, remaining) = hfg->eliminatePartialSequential(ordering_partial);
const auto [hbn, remaining] = hfg->eliminatePartialSequential(ordering_partial);
EXPECT_LONGS_EQUAL(14, hbn->size());
EXPECT_LONGS_EQUAL(11, remaining->size());
@ -847,10 +833,9 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
EXPECT(ratioTest(bn, measurements, fg));
// Do elimination of X(2) only:
auto result = fg.eliminatePartialSequential(Ordering{X(2)});
auto fg1 = *result.second;
fg1.push_back(*result.first);
EXPECT(ratioTest(bn, measurements, fg1));
auto [bn1, fg1] = fg.eliminatePartialSequential(Ordering{X(2)});
fg1->push_back(*bn1);
EXPECT(ratioTest(bn, measurements, *fg1));
// Create ordering that eliminates in time order, then discrete modes:
Ordering ordering {X(2), X(1), X(0), N(0), N(1), N(2), M(1), M(2)};

View File

@ -132,9 +132,7 @@ TEST(HybridGaussianElimination, IncrementalInference) {
ordering += X(2);
// Now we calculate the expected factors using full elimination
HybridBayesTree::shared_ptr expectedHybridBayesTree;
HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph;
std::tie(expectedHybridBayesTree, expectedRemainingGraph) =
const auto [expectedHybridBayesTree, expectedRemainingGraph] =
switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
// The densities on X(0) should be the same
@ -231,9 +229,7 @@ TEST(HybridGaussianElimination, Approx_inference) {
}
// Now we calculate the actual factors using full elimination
HybridBayesTree::shared_ptr unprunedHybridBayesTree;
HybridGaussianFactorGraph::shared_ptr unprunedRemainingGraph;
std::tie(unprunedHybridBayesTree, unprunedRemainingGraph) =
const auto [unprunedHybridBayesTree, unprunedRemainingGraph] =
switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
size_t maxNrLeaves = 5;

View File

@ -350,10 +350,7 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
ordering += X(0);
ordering += X(1);
HybridConditional::shared_ptr hybridConditionalMixture;
std::shared_ptr<Factor> factorOnModes;
std::tie(hybridConditionalMixture, factorOnModes) =
const auto [hybridConditionalMixture, factorOnModes] =
EliminateHybrid(factors, ordering);
auto gaussianConditionalMixture =
@ -385,9 +382,7 @@ TEST(HybridFactorGraph, Partial_Elimination) {
for (size_t k = 0; k < self.K; k++) ordering += X(k);
// Eliminate partially i.e. only continuous part.
HybridBayesNet::shared_ptr hybridBayesNet;
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
std::tie(hybridBayesNet, remainingFactorGraph) =
const auto [hybridBayesNet, remainingFactorGraph] =
linearizedFactorGraph.eliminatePartialSequential(ordering);
CHECK(hybridBayesNet);
@ -415,8 +410,6 @@ TEST(HybridFactorGraph, Full_Elimination) {
auto linearizedFactorGraph = self.linearizedFactorGraph;
// We first do a partial elimination
HybridBayesNet::shared_ptr hybridBayesNet_partial;
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph_partial;
DiscreteBayesNet discreteBayesNet;
{
@ -425,7 +418,7 @@ TEST(HybridFactorGraph, Full_Elimination) {
for (size_t k = 0; k < self.K; k++) ordering += X(k);
// Eliminate partially.
std::tie(hybridBayesNet_partial, remainingFactorGraph_partial) =
const auto [hybridBayesNet_partial, remainingFactorGraph_partial] =
linearizedFactorGraph.eliminatePartialSequential(ordering);
DiscreteFactorGraph discrete_fg;
@ -489,9 +482,7 @@ TEST(HybridFactorGraph, Printing) {
for (size_t k = 0; k < self.K; k++) ordering += X(k);
// Eliminate partially.
HybridBayesNet::shared_ptr hybridBayesNet;
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
std::tie(hybridBayesNet, remainingFactorGraph) =
const auto [hybridBayesNet, remainingFactorGraph] =
linearizedFactorGraph.eliminatePartialSequential(ordering);
string expected_hybridFactorGraph = R"(
@ -721,11 +712,9 @@ TEST(HybridFactorGraph, DefaultDecisionTree) {
ordering += X(1);
HybridGaussianFactorGraph linearized = *fg.linearize(initialEstimate);
gtsam::HybridBayesNet::shared_ptr hybridBayesNet;
gtsam::HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
// This should NOT fail
std::tie(hybridBayesNet, remainingFactorGraph) =
const auto [hybridBayesNet, remainingFactorGraph] =
linearized.eliminatePartialSequential(ordering);
EXPECT_LONGS_EQUAL(4, hybridBayesNet->size());
EXPECT_LONGS_EQUAL(1, remainingFactorGraph->size());

View File

@ -149,9 +149,7 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
ordering += X(2);
// Now we calculate the actual factors using full elimination
HybridBayesTree::shared_ptr expectedHybridBayesTree;
HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph;
std::tie(expectedHybridBayesTree, expectedRemainingGraph) =
const auto [expectedHybridBayesTree, expectedRemainingGraph] =
switching.linearizedFactorGraph
.BaseEliminateable::eliminatePartialMultifrontal(ordering);
@ -250,9 +248,7 @@ TEST(HybridNonlinearISAM, Approx_inference) {
}
// Now we calculate the actual factors using full elimination
HybridBayesTree::shared_ptr unprunedHybridBayesTree;
HybridGaussianFactorGraph::shared_ptr unprunedRemainingGraph;
std::tie(unprunedHybridBayesTree, unprunedRemainingGraph) =
const auto [unprunedHybridBayesTree, unprunedRemainingGraph] =
switching.linearizedFactorGraph
.BaseEliminateable::eliminatePartialMultifrontal(ordering);

View File

@ -245,7 +245,7 @@ namespace gtsam {
void BayesTree<CLIQUE>::fillNodesIndex(const sharedClique& subtree) {
// Add each frontal variable of this root node
for(const Key& j: subtree->conditional()->frontals()) {
bool inserted = nodes_.insert(std::make_pair(j, subtree)).second;
bool inserted = nodes_.insert({j, subtree}).second;
assert(inserted); (void)inserted;
}
// Fill index for each child
@ -360,9 +360,10 @@ namespace gtsam {
C1_minus_B.assign(C1_minus_B_set.begin(), C1_minus_B_set.end());
}
// Factor into C1\B | B.
sharedFactorGraph temp_remaining;
std::tie(p_C1_B, temp_remaining) =
FactorGraphType(p_C1_Bred).eliminatePartialMultifrontal(Ordering(C1_minus_B), function);
p_C1_B =
FactorGraphType(p_C1_Bred)
.eliminatePartialMultifrontal(Ordering(C1_minus_B), function)
.first;
}
std::shared_ptr<typename EliminationTraitsType::BayesTreeType> p_C2_B; {
KeyVector C2_minus_B; {
@ -372,9 +373,10 @@ namespace gtsam {
C2_minus_B.assign(C2_minus_B_set.begin(), C2_minus_B_set.end());
}
// Factor into C2\B | B.
sharedFactorGraph temp_remaining;
std::tie(p_C2_B, temp_remaining) =
FactorGraphType(p_C2_Bred).eliminatePartialMultifrontal(Ordering(C2_minus_B), function);
p_C2_B =
FactorGraphType(p_C2_Bred)
.eliminatePartialMultifrontal(Ordering(C2_minus_B), function)
.first;
}
gttoc(Full_root_factoring);

View File

@ -210,7 +210,7 @@ struct EliminationData {
// putting orphan subtrees in the index - they'll already be in the index of the ISAM2
// object they're added to.
for (const Key& j: myData.bayesTreeNode->conditional()->frontals())
nodesIndex_.insert(std::make_pair(j, myData.bayesTreeNode));
nodesIndex_.emplace(j, myData.bayesTreeNode);
// Store remaining factor in parent's gathered factors
if (!eliminationResult.second->empty()) {
@ -273,7 +273,7 @@ EliminatableClusterTree<BAYESTREE, GRAPH>::eliminate(const Eliminate& function)
}
// Return result
return std::make_pair(result, remaining);
return {result, remaining};
}
} // namespace gtsam

View File

@ -72,9 +72,7 @@ namespace gtsam {
gttic(eliminateSequential);
// Do elimination
EliminationTreeType etree(asDerived(), (*variableIndex).get(), ordering);
std::shared_ptr<BayesNetType> bayesNet;
std::shared_ptr<FactorGraphType> factorGraph;
std::tie(bayesNet,factorGraph) = etree.eliminate(function);
const auto [bayesNet, factorGraph] = etree.eliminate(function);
// If any factors are remaining, the ordering was incomplete
if(!factorGraph->empty())
throw InconsistentEliminationRequested();
@ -136,9 +134,7 @@ namespace gtsam {
// Do elimination with given ordering
EliminationTreeType etree(asDerived(), (*variableIndex).get(), ordering);
JunctionTreeType junctionTree(etree);
std::shared_ptr<BayesTreeType> bayesTree;
std::shared_ptr<FactorGraphType> factorGraph;
std::tie(bayesTree,factorGraph) = junctionTree.eliminate(function);
const auto [bayesTree, factorGraph] = junctionTree.eliminate(function);
// If any factors are remaining, the ordering was incomplete
if(!factorGraph->empty())
throw InconsistentEliminationRequested();
@ -274,9 +270,7 @@ namespace gtsam {
gttic(marginalMultifrontalBayesNet);
// An ordering was provided for the marginalized variables, so we can first eliminate them
// in the order requested.
std::shared_ptr<BayesTreeType> bayesTree;
std::shared_ptr<FactorGraphType> factorGraph;
std::tie(bayesTree,factorGraph) =
const auto [bayesTree, factorGraph] =
eliminatePartialMultifrontal(marginalizedVariableOrdering, function, variableIndex);
if(const Ordering* varsAsOrdering = boost::get<const Ordering&>(&variables))
@ -341,9 +335,7 @@ namespace gtsam {
gttic(marginalMultifrontalBayesTree);
// An ordering was provided for the marginalized variables, so we can first eliminate them
// in the order requested.
std::shared_ptr<BayesTreeType> bayesTree;
std::shared_ptr<FactorGraphType> factorGraph;
std::tie(bayesTree,factorGraph) =
const auto [bayesTree, factorGraph] =
eliminatePartialMultifrontal(marginalizedVariableOrdering, function, variableIndex);
if(const Ordering* varsAsOrdering = boost::get<const Ordering&>(&variables))

View File

@ -198,7 +198,7 @@ namespace gtsam {
allRemainingFactors->push_back(remainingFactors.begin(), remainingFactors.end());
// Return result
return std::make_pair(result, allRemainingFactors);
return {result, allRemainingFactors};
}
/* ************************************************************************* */
@ -218,13 +218,13 @@ namespace gtsam {
// Add roots in sorted order
{
FastMap<Key,sharedNode> keys;
for(const sharedNode& root: this->roots_) { keys.insert(std::make_pair(root->key, root)); }
for(const sharedNode& root: this->roots_) { keys.emplace(root->key, root); }
typedef typename FastMap<Key,sharedNode>::value_type Key_Node;
for(const Key_Node& key_node: keys) { stack1.push(key_node.second); }
}
{
FastMap<Key,sharedNode> keys;
for(const sharedNode& root: expected.roots_) { keys.insert(std::make_pair(root->key, root)); }
for(const sharedNode& root: expected.roots_) { keys.emplace(root->key, root); }
typedef typename FastMap<Key,sharedNode>::value_type Key_Node;
for(const Key_Node& key_node: keys) { stack2.push(key_node.second); }
}
@ -258,13 +258,13 @@ namespace gtsam {
// Add children in sorted order
{
FastMap<Key,sharedNode> keys;
for(const sharedNode& node: node1->children) { keys.insert(std::make_pair(node->key, node)); }
for(const sharedNode& node: node1->children) { keys.emplace(node->key, node); }
typedef typename FastMap<Key,sharedNode>::value_type Key_Node;
for(const Key_Node& key_node: keys) { stack1.push(key_node.second); }
}
{
FastMap<Key,sharedNode> keys;
for(const sharedNode& node: node2->children) { keys.insert(std::make_pair(node->key, node)); }
for(const sharedNode& node: node2->children) { keys.emplace(node->key, node); }
typedef typename FastMap<Key,sharedNode>::value_type Key_Node;
for(const Key_Node& key_node: keys) { stack2.push(key_node.second); }
}

View File

@ -83,10 +83,8 @@ struct ConstructorTraversalData {
Ordering keyAsOrdering;
keyAsOrdering.push_back(ETreeNode->key);
SymbolicConditional::shared_ptr myConditional;
SymbolicFactor::shared_ptr mySeparatorFactor;
std::tie(myConditional, mySeparatorFactor) = internal::EliminateSymbolic(
symbolicFactors, keyAsOrdering);
const auto [myConditional, mySeparatorFactor] =
internal::EliminateSymbolic(symbolicFactors, keyAsOrdering);
// Store symbolic elimination results in the parent
myData.parentData->childSymbolicConditionals.push_back(myConditional);

View File

@ -109,8 +109,7 @@ VariableSlots::VariableSlots(const FG& factorGraph)
// we're combining. Initially we put the max integer value in
// the array entry for each factor that will indicate the factor
// does not involve the variable.
iterator thisVarSlots; bool inserted;
std::tie(thisVarSlots, inserted) = this->insert(std::make_pair(involvedVariable, FastVector<size_t>()));
auto [thisVarSlots, inserted] = this->insert({involvedVariable, FastVector<size_t>()});
if(inserted)
thisVarSlots->second.resize(factorGraph.nrFactors(), Empty);
thisVarSlots->second[jointFactorPos] = factorVarSlot;

View File

@ -48,13 +48,8 @@ public:
/* ************************************************************************* */
template<class KEY>
std::list<KEY> predecessorMap2Keys(const PredecessorMap<KEY>& p_map) {
typedef typename SGraph<KEY>::Vertex SVertex;
SGraph<KEY> g;
SVertex root;
std::map<KEY, SVertex> key2vertex;
std::tie(g, root, key2vertex) = gtsam::predecessorMap2Graph<SGraph<KEY>, SVertex, KEY>(p_map);
const auto [g, root, key2vertex] = gtsam::predecessorMap2Graph<SGraph<KEY>, SVertex, KEY>(p_map);
// breadth first visit on the graph
std::list<KEY> keys;
@ -121,18 +116,16 @@ predecessorMap2Graph(const PredecessorMap<KEY>& p_map) {
std::map<KEY, V> key2vertex;
V v1, v2, root;
bool foundRoot = false;
for(auto child_parent: p_map) {
KEY child, parent;
std::tie(child,parent) = child_parent;
for(const auto& [child, parent]: p_map) {
if (key2vertex.find(child) == key2vertex.end()) {
v1 = add_vertex(child, g);
key2vertex.insert(std::make_pair(child, v1));
key2vertex.emplace(child, v1);
} else
v1 = key2vertex[child];
if (key2vertex.find(parent) == key2vertex.end()) {
v2 = add_vertex(parent, g);
key2vertex.insert(std::make_pair(parent, v2));
key2vertex.emplace(parent, v2);
} else
v2 = key2vertex[parent];
@ -180,7 +173,6 @@ std::shared_ptr<Values> composePoses(const G& graph, const PredecessorMap<KEY>&
boost::property<boost::vertex_name_t, KEY>,
boost::property<boost::edge_weight_t, POSE> > PoseGraph;
typedef typename boost::graph_traits<PoseGraph>::vertex_descriptor PoseVertex;
typedef typename boost::graph_traits<PoseGraph>::edge_descriptor PoseEdge;
PoseGraph g;
PoseVertex root;
@ -189,8 +181,6 @@ std::shared_ptr<Values> composePoses(const G& graph, const PredecessorMap<KEY>&
predecessorMap2Graph<PoseGraph, PoseVertex, KEY>(tree);
// attach the relative poses to the edges
PoseEdge edge12, edge21;
bool found1, found2;
for(typename G::sharedFactor nl_factor: graph) {
if (nl_factor->keys().size() > 2)
@ -207,8 +197,8 @@ std::shared_ptr<Values> composePoses(const G& graph, const PredecessorMap<KEY>&
PoseVertex v2 = key2vertex.find(key2)->second;
POSE l1Xl2 = factor->measured();
std::tie(edge12, found1) = boost::edge(v1, v2, g);
std::tie(edge21, found2) = boost::edge(v2, v1, g);
const auto [edge12, found1] = boost::edge(v1, v2, g);
const auto [edge21, found2] = boost::edge(v2, v1, g);
if (found1 && found2) throw std::invalid_argument ("composePoses: invalid spanning tree");
if (!found1 && !found2) continue;
if (found1)

View File

@ -59,7 +59,7 @@ namespace gtsam {
public:
/** convenience insert so we can pass ints for TypedSymbol keys */
inline void insert(const KEY& key, const KEY& parent) {
std::map<KEY, KEY>::insert(std::make_pair(key, parent));
std::map<KEY, KEY>::emplace(key, parent);
}
};

View File

@ -416,9 +416,7 @@ void HessianFactor::multiplyHessianAdd(double alpha, const VectorValues& x,
// copy to yvalues
for (DenseIndex i = 0; i < (DenseIndex) size(); ++i) {
bool didNotExist;
VectorValues::iterator it;
std::tie(it, didNotExist) = yvalues.tryInsert(keys_[i], Vector());
const auto [it, didNotExist] = yvalues.tryInsert(keys_[i], Vector());
if (didNotExist)
it->second = alpha * y[i]; // init
else

View File

@ -96,9 +96,7 @@ JacobianFactor::JacobianFactor(const HessianFactor& factor)
Ab_.full() = factor.info().selfadjointView();
// Do Cholesky to get a Jacobian
size_t maxrank;
bool success;
std::tie(maxrank, success) = choleskyCareful(Ab_.matrix());
const auto [maxrank, success] = choleskyCareful(Ab_.matrix());
// Check that Cholesky succeeded OR it managed to factor the full Hessian.
// The latter case occurs with non-positive definite matrices arising from QP.
@ -215,9 +213,7 @@ void JacobianFactor::JacobianFactorHelper(const GaussianFactorGraph& graph,
graph);
// Count dimensions
FastVector<DenseIndex> varDims;
DenseIndex m, n;
std::tie(varDims, m, n) = _countDims(jacobians, orderedSlots);
const auto [varDims, m, n] = _countDims(jacobians, orderedSlots);
// Allocate matrix and copy keys in order
gttic(allocate);

View File

@ -38,12 +38,12 @@ KalmanFilter::solve(const GaussianFactorGraph& factorGraph) const {
// Eliminate the graph using the provided Eliminate function
Ordering ordering(factorGraph.keys());
GaussianBayesNet::shared_ptr bayesNet = //
const auto bayesNet = //
factorGraph.eliminateSequential(ordering, function_);
// As this is a filter, all we need is the posterior P(x_t).
// This is the last GaussianConditional in the resulting BayesNet
GaussianConditional::shared_ptr posterior = *(--bayesNet->end());
GaussianConditional::shared_ptr posterior = bayesNet->back();
return std::make_shared<GaussianDensity>(*posterior);
}
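
`back()` returns the last element just as `*(--end())` does for a non-empty container, but states the intent directly. A trivial sketch with a `std::vector` standing in for the Bayes net's conditional container:

#include <cassert>
#include <vector>

int main() {
  std::vector<int> conditionals{1, 2, 3};  // stand-in for the Bayes net's container
  assert(conditionals.back() == *(--conditionals.end()));  // same element, clearer intent
  return 0;
}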

View File

@ -473,7 +473,7 @@ std::pair<GaussianFactorGraph, GaussianFactorGraph> splitFactorGraph(
remaining.remove(e.index);
}
return std::make_pair(subgraphFactors, remaining);
return {subgraphFactors, remaining};
}
/*****************************************************************************/

View File

@ -34,8 +34,7 @@ namespace gtsam {
SubgraphSolver::SubgraphSolver(const GaussianFactorGraph &Ab,
const Parameters &parameters, const Ordering& ordering) :
parameters_(parameters) {
GaussianFactorGraph Ab1, Ab2;
std::tie(Ab1, Ab2) = splitGraph(Ab);
const auto [Ab1, Ab2] = splitGraph(Ab);
if (parameters_.verbosity())
cout << "Split A into (A1) " << Ab1.size() << " and (A2) " << Ab2.size()
<< " factors" << endl;

View File

@ -43,7 +43,7 @@ namespace gtsam {
#ifdef TBB_GREATER_EQUAL_2020
values_.emplace(key, x.segment(j, n));
#else
values_.insert(std::make_pair(key, x.segment(j, n)));
values_.insert({key, x.segment(j, n)});
#endif
j += n;
}
@ -56,7 +56,7 @@ namespace gtsam {
#ifdef TBB_GREATER_EQUAL_2020
values_.emplace(v.key, x.segment(j, v.dimension));
#else
values_.insert(std::make_pair(v.key, x.segment(j, v.dimension)));
values_.insert({v.key, x.segment(j, v.dimension)});
#endif
j += v.dimension;
}
@ -70,7 +70,7 @@ namespace gtsam {
#ifdef TBB_GREATER_EQUAL_2020
result.values_.emplace(key, Vector::Zero(value.size()));
#else
result.values_.insert(std::make_pair(key, Vector::Zero(value.size())));
result.values_.insert({key, Vector::Zero(value.size())});
#endif
return result;
}
@ -267,7 +267,7 @@ namespace gtsam {
#ifdef TBB_GREATER_EQUAL_2020
result.values_.emplace(j1->first, j1->second + j2->second);
#else
result.values_.insert(std::make_pair(j1->first, j1->second + j2->second));
result.values_.insert({j1->first, j1->second + j2->second});
#endif
return result;
@ -329,7 +329,7 @@ namespace gtsam {
#ifdef TBB_GREATER_EQUAL_2020
result.values_.emplace(j1->first, j1->second - j2->second);
#else
result.values_.insert(std::make_pair(j1->first, j1->second - j2->second));
result.values_.insert({j1->first, j1->second - j2->second});
#endif
return result;
@ -349,7 +349,7 @@ namespace gtsam {
#ifdef TBB_GREATER_EQUAL_2020
result.values_.emplace(key_v.first, a * key_v.second);
#else
result.values_.insert(std::make_pair(key_v.first, a * key_v.second));
result.values_.insert({key_v.first, a * key_v.second});
#endif
return result;
}

View File

@ -80,7 +80,7 @@ namespace gtsam {
public:
typedef Values::iterator iterator; ///< Iterator over vector values
typedef Values::const_iterator const_iterator; ///< Const iterator over vector values
typedef std::shared_ptr<This> shared_ptr; ///< shared_ptr to this class
typedef std::shared_ptr<This> shared_ptr; ///< shared_ptr to this class
typedef Values::value_type value_type; ///< Typedef to pair<Key, Vector>
typedef value_type KeyValuePair; ///< Typedef to pair<Key, Vector>
typedef std::map<Key, size_t> Dims; ///< Keyed vector dimensions
@ -186,7 +186,7 @@ namespace gtsam {
#if ! defined(GTSAM_USE_TBB) || defined (TBB_GREATER_EQUAL_2020)
return values_.emplace(std::piecewise_construct, std::forward_as_tuple(j), std::forward_as_tuple(args...));
#else
return values_.insert(std::make_pair(j, Vector(std::forward<Args>(args)...)));
return values_.insert({j, Vector(std::forward<Args>(args)...)});
#endif
}
@ -195,7 +195,7 @@ namespace gtsam {
* @param value The vector to be inserted.
* @param j The index with which the value will be associated. */
iterator insert(Key j, const Vector& value) {
return insert(std::make_pair(j, value));
return insert({j, value});
}
/** Insert all values from \c values. Throws an invalid_argument exception if any keys to be
@ -210,7 +210,7 @@ namespace gtsam {
#ifdef TBB_GREATER_EQUAL_2020
return values_.emplace(j, value);
#else
return values_.insert(std::make_pair(j, value));
return values_.insert({j, value});
#endif
}

View File

@ -50,8 +50,7 @@ static GaussianBayesNet noisyBayesNet = {
/* ************************************************************************* */
TEST( GaussianBayesNet, Matrix )
{
Matrix R; Vector d;
std::tie(R,d) = smallBayesNet.matrix(); // find matrix and RHS
const auto [R, d] = smallBayesNet.matrix(); // find matrix and RHS
Matrix R1 = (Matrix2() <<
1.0, 1.0,
@ -100,8 +99,7 @@ TEST(GaussianBayesNet, Evaluate2) {
/* ************************************************************************* */
TEST( GaussianBayesNet, NoisyMatrix )
{
Matrix R; Vector d;
std::tie(R,d) = noisyBayesNet.matrix(); // find matrix and RHS
const auto [R, d] = noisyBayesNet.matrix(); // find matrix and RHS
Matrix R1 = (Matrix2() <<
0.5, 0.5,
@ -123,9 +121,7 @@ TEST(GaussianBayesNet, Optimize) {
/* ************************************************************************* */
TEST(GaussianBayesNet, NoisyOptimize) {
Matrix R;
Vector d;
std::tie(R, d) = noisyBayesNet.matrix(); // find matrix and RHS
const auto [R, d] = noisyBayesNet.matrix(); // find matrix and RHS
const Vector x = R.inverse() * d;
const VectorValues expected{{_x_, x.head(1)}, {_y_, x.tail(1)}};
@ -236,9 +232,7 @@ TEST( GaussianBayesNet, MatrixStress )
KeyVector({_y_, _x_, _z_}), KeyVector({_y_, _z_, _x_}),
KeyVector({_z_, _x_, _y_}), KeyVector({_z_, _y_, _x_})}) {
const Ordering ordering(keys);
Matrix R;
Vector d;
std::tie(R, d) = bn.matrix(ordering);
const auto [R, d] = bn.matrix(ordering);
EXPECT(assert_equal(expected.vector(ordering), R.inverse() * d));
}
}

View File

@ -155,20 +155,16 @@ TEST(GaussianFactorGraph, matrices) {
// jacobian
Matrix A = Ab.leftCols(Ab.cols() - 1);
Vector b = Ab.col(Ab.cols() - 1);
Matrix actualA;
Vector actualb;
std::tie(actualA, actualb) = gfg.jacobian();
const auto [actualA, actualb] = gfg.jacobian();
EXPECT(assert_equal(A, actualA));
EXPECT(assert_equal(b, actualb));
// hessian
Matrix L = A.transpose() * A;
Vector eta = A.transpose() * b;
Matrix actualL;
Vector actualeta;
std::tie(actualL, actualeta) = gfg.hessian();
const auto [actualL, actualEta] = gfg.hessian();
EXPECT(assert_equal(L, actualL));
EXPECT(assert_equal(eta, actualeta));
EXPECT(assert_equal(eta, actualEta));
// hessianBlockDiagonal
VectorValues expectLdiagonal; // Make explicit that diagonal is sum-squares of columns
@ -245,9 +241,7 @@ TEST(GaussianFactorGraph, eliminate_empty) {
// eliminate an empty factor
GaussianFactorGraph gfg;
gfg.add(JacobianFactor());
GaussianBayesNet::shared_ptr actualBN;
GaussianFactorGraph::shared_ptr remainingGFG;
std::tie(actualBN, remainingGFG) = gfg.eliminatePartialSequential(Ordering());
const auto [actualBN, remainingGFG] = gfg.eliminatePartialSequential(Ordering());
// expected Bayes net is empty
GaussianBayesNet expectedBN;
@ -263,12 +257,8 @@ TEST(GaussianFactorGraph, eliminate_empty) {
/* ************************************************************************* */
TEST(GaussianFactorGraph, matrices2) {
GaussianFactorGraph gfg = createSimpleGaussianFactorGraph();
Matrix A;
Vector b;
std::tie(A, b) = gfg.jacobian();
Matrix AtA;
Vector eta;
std::tie(AtA, eta) = gfg.hessian();
const auto [A, b] = gfg.jacobian();
const auto [AtA, eta] = gfg.hessian();
EXPECT(assert_equal(A.transpose() * A, AtA));
EXPECT(assert_equal(A.transpose() * b, eta));
Matrix expectedAtA(6, 6);
@ -314,9 +304,7 @@ TEST(GaussianFactorGraph, multiplyHessianAdd2) {
GaussianFactorGraph gfg = createGaussianFactorGraphWithHessianFactor();
// brute force
Matrix AtA;
Vector eta;
std::tie(AtA, eta) = gfg.hessian();
const auto [AtA, eta] = gfg.hessian();
Vector X(6);
X << 1, 2, 3, 4, 5, 6;
Vector Y(6);
@ -341,12 +329,8 @@ TEST(GaussianFactorGraph, multiplyHessianAdd2) {
/* ************************************************************************* */
TEST(GaussianFactorGraph, matricesMixed) {
GaussianFactorGraph gfg = createGaussianFactorGraphWithHessianFactor();
Matrix A;
Vector b;
std::tie(A, b) = gfg.jacobian(); // incorrect !
Matrix AtA;
Vector eta;
std::tie(AtA, eta) = gfg.hessian(); // correct
const auto [A, b] = gfg.jacobian(); // incorrect !
const auto [AtA, eta] = gfg.hessian(); // correct
EXPECT(assert_equal(A.transpose() * A, AtA));
Vector expected = -(Vector(6) << -25, 17.5, 5, -13.5, 29, 4).finished();
EXPECT(assert_equal(expected, eta));

View File

@ -293,15 +293,11 @@ TEST(HessianFactor, CombineAndEliminate1) {
// perform elimination on jacobian
Ordering ordering {1};
GaussianConditional::shared_ptr expectedConditional;
JacobianFactor::shared_ptr expectedFactor;
std::tie(expectedConditional, expectedFactor) = jacobian.eliminate(ordering);
const auto [expectedConditional, expectedFactor] = jacobian.eliminate(ordering);
CHECK(expectedFactor);
// Eliminate
GaussianConditional::shared_ptr actualConditional;
HessianFactor::shared_ptr actualHessian;
std::tie(actualConditional, actualHessian) = //
const auto [actualConditional, actualHessian] = //
EliminateCholesky(gfg, ordering);
actualConditional->setModel(false,Vector3(1,1,1)); // add a unit model for comparison
@ -356,15 +352,11 @@ TEST(HessianFactor, CombineAndEliminate2) {
// perform elimination on jacobian
Ordering ordering {0};
GaussianConditional::shared_ptr expectedConditional;
JacobianFactor::shared_ptr expectedFactor;
std::tie(expectedConditional, expectedFactor) = //
const auto [expectedConditional, expectedFactor] =
jacobian.eliminate(ordering);
// Eliminate
GaussianConditional::shared_ptr actualConditional;
HessianFactor::shared_ptr actualHessian;
std::tie(actualConditional, actualHessian) = //
const auto [actualConditional, actualHessian] = //
EliminateCholesky(gfg, ordering);
actualConditional->setModel(false,Vector3(1,1,1)); // add a unit model for comparison
@ -498,7 +490,7 @@ TEST(HessianFactor, gradientAtZero)
// test gradient at zero
VectorValues expectedG{{0, -g1}, {1, -g2}};
Matrix A; Vector b; std::tie(A,b) = factor.jacobian();
const auto [A, b] = factor.jacobian();
KeyVector keys {0, 1};
EXPECT(assert_equal(-A.transpose()*b, expectedG.vector(keys)));
VectorValues actualG = factor.gradientAtZero();

View File

@ -368,7 +368,7 @@ TEST(JacobianFactor, operators )
EXPECT(assert_equal(expectedX, actualX));
// test gradient at zero
Matrix A; Vector b2; std::tie(A,b2) = lf.jacobian();
const auto [A, b2] = lf.jacobian();
VectorValues expectedG;
expectedG.insert(1, Vector2(20,-10));
expectedG.insert(2, Vector2(-20, 10));

View File

@ -123,9 +123,7 @@ TEST(GPSData, init) {
Point3 NED2(N, E, -U);
// Estimate initial state
Pose3 T;
Vector3 nV;
std::tie(T, nV) = GPSFactor::EstimateState(t1, NED1, t2, NED2, 84831.0796);
const auto [T, nV] = GPSFactor::EstimateState(t1, NED1, t2, NED2, 84831.0796);
// Check values
EXPECT(assert_equal((Vector )Vector3(29.9575, -29.0564, -1.95993), nV, 1e-4));

View File

@ -143,9 +143,7 @@ T Expression<T>::value(const Values& values,
std::vector<Matrix>* H) const {
if (H) {
// Call private version that returns derivatives in H
KeyVector keys;
FastVector<int> dims;
std::tie(keys, dims) = keysAndDims();
const auto [keys, dims] = keysAndDims();
return valueAndDerivatives(values, keys, dims, *H);
} else
// no derivatives needed, just return value
@ -229,7 +227,7 @@ typename Expression<T>::KeysAndDims Expression<T>::keysAndDims() const {
std::map<Key, int> map;
dims(map);
size_t n = map.size();
KeysAndDims pair = std::make_pair(KeyVector(n), FastVector<int>(n));
KeysAndDims pair = {KeyVector(n), FastVector<int>(n)};
// Copy map into pair of vectors
auto key_it = pair.first.begin();
auto dim_it = pair.second.begin();

View File

@ -321,7 +321,7 @@ void ISAM2::recalculateIncremental(const ISAM2UpdateParams& updateParams,
const int group =
result->observedKeys.size() < affectedFactorsVarIndex.size() ? 1 : 0;
for (Key var : result->observedKeys)
constraintGroups.insert(std::make_pair(var, group));
constraintGroups.emplace(var, group);
}
// Remove unaffected keys from the constraints

View File

@ -330,9 +330,7 @@ void ISAM2Clique::addGradientAtZero(VectorValues* g) const {
for (auto it = conditional_->begin(); it != conditional_->end(); ++it) {
const DenseIndex dim = conditional_->getDim(it);
const Vector contribution = gradientContribution_.segment(position, dim);
VectorValues::iterator values_it;
bool success;
std::tie(values_it, success) = g->tryInsert(*it, contribution);
auto [values_it, success] = g->tryInsert(*it, contribution);
if (!success) values_it->second += contribution;
position += dim;
}

View File

@ -63,9 +63,7 @@ NonlinearConjugateGradientOptimizer::System::State NonlinearConjugateGradientOpt
}
GaussianFactorGraph::shared_ptr NonlinearConjugateGradientOptimizer::iterate() {
Values newValues;
int dummy;
std::tie(newValues, dummy) = nonlinearConjugateGradient<System, Values>(
const auto [newValues, dummy] = nonlinearConjugateGradient<System, Values>(
System(graph_), state_->values, params_, true /* single iteration */);
state_.reset(new State(newValues, graph_.error(newValues), state_->iterations + 1));
@ -76,9 +74,7 @@ GaussianFactorGraph::shared_ptr NonlinearConjugateGradientOptimizer::iterate() {
const Values& NonlinearConjugateGradientOptimizer::optimize() {
// Optimize until convergence
System system(graph_);
Values newValues;
int iterations;
std::tie(newValues, iterations) =
const auto [newValues, iterations] =
nonlinearConjugateGradient(system, state_->values, params_, false);
state_.reset(new State(std::move(newValues), graph_.error(newValues), iterations));
return state_->values;

View File

@ -159,7 +159,7 @@ std::tuple<V, int> nonlinearConjugateGradient(const S &system,
std::cout << "Exiting, as error = " << currentError << " < "
<< params.errorTol << std::endl;
}
return std::tie(initial, iteration);
return {initial, iteration};
}
V currentValues = initial;
@ -217,7 +217,7 @@ std::tuple<V, int> nonlinearConjugateGradient(const S &system,
<< "nonlinearConjugateGradient: Terminating because reached maximum iterations"
<< std::endl;
return std::tie(currentValues, iteration);
return {currentValues, iteration};
}
} // \ namespace gtsam
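Both return statements above build a std::tuple<V, int>. With std::tie the function first forms a tuple of references to its locals and then converts it to the return type; the braced form constructs the returned tuple directly. A small sketch of the difference, with the element types simplified:

#include <string>
#include <tuple>

std::tuple<std::string, int> makeProblemTie() {
  std::string name = "graph";
  int size = 5;
  // Builds a tuple<std::string&, int&> first, then converts it to the
  // return type by copying.
  return std::tie(name, size);
}

std::tuple<std::string, int> makeProblemBraced() {
  std::string name = "graph";
  int size = 5;
  // Constructs the returned tuple directly from the operands.
  return {name, size};
}

int main() {
  return (makeProblemTie() == makeProblemBraced()) ? 0 : 1;
}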

View File

@ -116,7 +116,7 @@ MFAS::MFAS(const TranslationEdges& relativeTranslations,
// Iterate over edges, obtain weights by projecting
// their relativeTranslations along the projection direction
for (const auto& measurement : relativeTranslations) {
edgeWeights_[std::make_pair(measurement.key1(), measurement.key2())] =
edgeWeights_[{measurement.key1(), measurement.key2()}] =
measurement.measured().dot(projectionDirection);
}
}
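The subscript above works because a braced {key1, key2} converts to the map's pair key type, so std::make_pair is unnecessary. A standalone sketch of the idiom (NodeId and the weights are illustrative):

#include <cstdint>
#include <iostream>
#include <map>
#include <utility>

int main() {
  using NodeId = std::uint64_t;
  std::map<std::pair<NodeId, NodeId>, double> edgeWeights;

  // The braced {key1, key2} is converted to std::pair<NodeId, NodeId>,
  // so no explicit std::make_pair is needed.
  edgeWeights[{1, 2}] = 0.75;
  edgeWeights[{2, 3}] = -0.25;

  std::cout << edgeWeights[{1, 2}] + edgeWeights[{2, 3}] << "\n";  // 0.5
  return 0;
}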

View File

@ -179,10 +179,8 @@ ShonanAveraging<d>::createOptimizerAt(size_t p, const Values &initial) const {
// Anchor prior is added here as depends on initial value (and cost is zero)
if (parameters_.alpha > 0) {
size_t i;
Rot value;
const size_t dim = SOn::Dimension(p);
std::tie(i, value) = parameters_.anchor;
const auto [i, value] = parameters_.anchor;
auto model = noiseModel::Isotropic::Precision(dim, parameters_.alpha);
graph.emplace_shared<PriorFactor<SOn>>(i, SOn::Lift(p, value.matrix()),
model);
@ -755,7 +753,7 @@ std::pair<double, Vector> ShonanAveraging<d>::computeMinEigenVector(
const Values &values) const {
Vector minEigenVector;
double minEigenValue = computeMinEigenValue(values, &minEigenVector);
return std::make_pair(minEigenValue, minEigenVector);
return {minEigenValue, minEigenVector};
}
/* ************************************************************************* */
@ -908,7 +906,7 @@ std::pair<Values, double> ShonanAveraging<d>::run(const Values &initialEstimate,
"When using robust norm, Shonan only tests a single rank. Set pMin = pMax");
}
const Values SO3Values = roundSolution(Qstar);
return std::make_pair(SO3Values, 0);
return {SO3Values, 0};
} else {
// Check certificate of global optimality
Vector minEigenVector;
@ -916,7 +914,7 @@ std::pair<Values, double> ShonanAveraging<d>::run(const Values &initialEstimate,
if (minEigenValue > parameters_.optimalityThreshold) {
// If at global optimum, round and return solution
const Values SO3Values = roundSolution(Qstar);
return std::make_pair(SO3Values, minEigenValue);
return {SO3Values, minEigenValue};
}
// Not at global optimum yet, so check whether we will go to next level

View File

@ -44,11 +44,9 @@ public:
//gfg.print("gfg");
// eliminate the point
std::shared_ptr<GaussianBayesNet> bn;
GaussianFactorGraph::shared_ptr fg;
KeyVector variables;
variables.push_back(pointKey);
std::tie(bn, fg) = gfg.eliminatePartialSequential(variables, EliminateQR);
const auto [bn, fg] = gfg.eliminatePartialSequential(variables, EliminateQR);
//fg->print("fg");
JacobianFactor::operator=(JacobianFactor(*fg));

View File

@ -148,7 +148,7 @@ public:
std::pair<Matrix, Vector> jacobian() const override {
throw std::runtime_error(
"RegularImplicitSchurFactor::jacobian non implemented");
return std::make_pair(Matrix(), Vector());
return {Matrix(), Vector()};
}
/// *Compute* full augmented information matrix

View File

@ -92,9 +92,7 @@ TEST( dataSet, parseEdge)
TEST(dataSet, load2D) {
///< The structure where we will save the SfM data
const string filename = findExampleDataFile("w100.graph");
NonlinearFactorGraph::shared_ptr graph;
Values::shared_ptr initial;
std::tie(graph, initial) = load2D(filename);
const auto [graph, initial] = load2D(filename);
EXPECT_LONGS_EQUAL(300, graph->size());
EXPECT_LONGS_EQUAL(100, initial->size());
auto model = noiseModel::Unit::Create(3);
@ -135,20 +133,17 @@ TEST(dataSet, load2D) {
/* ************************************************************************* */
TEST(dataSet, load2DVictoriaPark) {
const string filename = findExampleDataFile("victoria_park.txt");
NonlinearFactorGraph::shared_ptr graph;
Values::shared_ptr initial;
// Load all
std::tie(graph, initial) = load2D(filename);
EXPECT_LONGS_EQUAL(10608, graph->size());
EXPECT_LONGS_EQUAL(7120, initial->size());
const auto [graph1, initial1] = load2D(filename);
EXPECT_LONGS_EQUAL(10608, graph1->size());
EXPECT_LONGS_EQUAL(7120, initial1->size());
// Restrict keys
size_t maxIndex = 5;
std::tie(graph, initial) = load2D(filename, nullptr, maxIndex);
EXPECT_LONGS_EQUAL(5, graph->size());
EXPECT_LONGS_EQUAL(6, initial->size()); // file has 0 as well
EXPECT_LONGS_EQUAL(L(5), graph->at(4)->keys()[1]);
const auto [graph2, initial2] = load2D(filename, nullptr, maxIndex);
EXPECT_LONGS_EQUAL(5, graph2->size());
EXPECT_LONGS_EQUAL(6, initial2->size()); // file has 0 as well
EXPECT_LONGS_EQUAL(L(5), graph2->at(4)->keys()[1]);
}
/* ************************************************************************* */
@ -218,10 +213,8 @@ TEST(dataSet, readG2o3D) {
}
// Check graph version
NonlinearFactorGraph::shared_ptr actualGraph;
Values::shared_ptr actualValues;
bool is3D = true;
std::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D);
const auto [actualGraph, actualValues] = readG2o(g2oFile, is3D);
EXPECT(assert_equal(expectedGraph, *actualGraph, 1e-5));
for (size_t j : {0, 1, 2, 3, 4}) {
EXPECT(assert_equal(poses[j], actualValues->at<Pose3>(j), 1e-5));
@ -232,10 +225,8 @@ TEST(dataSet, readG2o3D) {
TEST( dataSet, readG2o3DNonDiagonalNoise)
{
const string g2oFile = findExampleDataFile("pose3example-offdiagonal.txt");
NonlinearFactorGraph::shared_ptr actualGraph;
Values::shared_ptr actualValues;
bool is3D = true;
std::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D);
const auto [actualGraph, actualValues] = readG2o(g2oFile, is3D);
Values expectedValues;
Rot3 R0 = Rot3::Quaternion(1.000000, 0.000000, 0.000000, 0.000000 );
@ -327,9 +318,7 @@ static NonlinearFactorGraph expectedGraph(const SharedNoiseModel& model) {
/* ************************************************************************* */
TEST(dataSet, readG2o) {
const string g2oFile = findExampleDataFile("pose2example");
NonlinearFactorGraph::shared_ptr actualGraph;
Values::shared_ptr actualValues;
std::tie(actualGraph, actualValues) = readG2o(g2oFile);
const auto [actualGraph, actualValues] = readG2o(g2oFile);
auto model = noiseModel::Diagonal::Precisions(
Vector3(44.721360, 44.721360, 30.901699));
@ -353,10 +342,8 @@ TEST(dataSet, readG2o) {
/* ************************************************************************* */
TEST(dataSet, readG2oHuber) {
const string g2oFile = findExampleDataFile("pose2example");
NonlinearFactorGraph::shared_ptr actualGraph;
Values::shared_ptr actualValues;
bool is3D = false;
std::tie(actualGraph, actualValues) =
const auto [actualGraph, actualValues] =
readG2o(g2oFile, is3D, KernelFunctionTypeHUBER);
auto baseModel = noiseModel::Diagonal::Precisions(
@ -370,10 +357,8 @@ TEST(dataSet, readG2oHuber) {
/* ************************************************************************* */
TEST(dataSet, readG2oTukey) {
const string g2oFile = findExampleDataFile("pose2example");
NonlinearFactorGraph::shared_ptr actualGraph;
Values::shared_ptr actualValues;
bool is3D = false;
std::tie(actualGraph, actualValues) =
const auto [actualGraph, actualValues] =
readG2o(g2oFile, is3D, KernelFunctionTypeTUKEY);
auto baseModel = noiseModel::Diagonal::Precisions(
@ -388,16 +373,12 @@ TEST(dataSet, readG2oTukey) {
TEST( dataSet, writeG2o)
{
const string g2oFile = findExampleDataFile("pose2example");
NonlinearFactorGraph::shared_ptr expectedGraph;
Values::shared_ptr expectedValues;
std::tie(expectedGraph, expectedValues) = readG2o(g2oFile);
const auto [expectedGraph, expectedValues] = readG2o(g2oFile);
const string filenameToWrite = createRewrittenFileName(g2oFile);
writeG2o(*expectedGraph, *expectedValues, filenameToWrite);
NonlinearFactorGraph::shared_ptr actualGraph;
Values::shared_ptr actualValues;
std::tie(actualGraph, actualValues) = readG2o(filenameToWrite);
const auto [actualGraph, actualValues] = readG2o(filenameToWrite);
EXPECT(assert_equal(*expectedValues,*actualValues,1e-5));
EXPECT(assert_equal(*expectedGraph,*actualGraph,1e-5));
}
@ -406,17 +387,13 @@ TEST( dataSet, writeG2o)
TEST( dataSet, writeG2o3D)
{
const string g2oFile = findExampleDataFile("pose3example");
NonlinearFactorGraph::shared_ptr expectedGraph;
Values::shared_ptr expectedValues;
bool is3D = true;
std::tie(expectedGraph, expectedValues) = readG2o(g2oFile, is3D);
const auto [expectedGraph, expectedValues] = readG2o(g2oFile, is3D);
const string filenameToWrite = createRewrittenFileName(g2oFile);
writeG2o(*expectedGraph, *expectedValues, filenameToWrite);
NonlinearFactorGraph::shared_ptr actualGraph;
Values::shared_ptr actualValues;
std::tie(actualGraph, actualValues) = readG2o(filenameToWrite, is3D);
const auto [actualGraph, actualValues] = readG2o(filenameToWrite, is3D);
EXPECT(assert_equal(*expectedValues,*actualValues,1e-4));
EXPECT(assert_equal(*expectedGraph,*actualGraph,1e-4));
}
@ -425,17 +402,13 @@ TEST( dataSet, writeG2o3D)
TEST( dataSet, writeG2o3DNonDiagonalNoise)
{
const string g2oFile = findExampleDataFile("pose3example-offdiagonal");
NonlinearFactorGraph::shared_ptr expectedGraph;
Values::shared_ptr expectedValues;
bool is3D = true;
std::tie(expectedGraph, expectedValues) = readG2o(g2oFile, is3D);
const auto [expectedGraph, expectedValues] = readG2o(g2oFile, is3D);
const string filenameToWrite = createRewrittenFileName(g2oFile);
writeG2o(*expectedGraph, *expectedValues, filenameToWrite);
NonlinearFactorGraph::shared_ptr actualGraph;
Values::shared_ptr actualValues;
std::tie(actualGraph, actualValues) = readG2o(filenameToWrite, is3D);
const auto [actualGraph, actualValues] = readG2o(filenameToWrite, is3D);
EXPECT(assert_equal(*expectedValues,*actualValues,1e-4));
EXPECT(assert_equal(*expectedGraph,*actualGraph,1e-4));
}

View File

@ -33,10 +33,8 @@ using namespace gtsam;
/* ************************************************************************* */
TEST(InitializePose3, computePoses2D) {
const string g2oFile = findExampleDataFile("noisyToyGraph.txt");
NonlinearFactorGraph::shared_ptr inputGraph;
Values::shared_ptr posesInFile;
bool is3D = false;
std::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D);
const auto [inputGraph, posesInFile] = readG2o(g2oFile, is3D);
auto priorModel = noiseModel::Unit::Create(3);
inputGraph->addPrior(0, posesInFile->at<Pose2>(0), priorModel);
@ -56,10 +54,8 @@ TEST(InitializePose3, computePoses2D) {
/* ************************************************************************* */
TEST(InitializePose3, computePoses3D) {
const string g2oFile = findExampleDataFile("Klaus3");
NonlinearFactorGraph::shared_ptr inputGraph;
Values::shared_ptr posesInFile;
bool is3D = true;
std::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D);
const auto [inputGraph, posesInFile] = readG2o(g2oFile, is3D);
auto priorModel = noiseModel::Unit::Create(6);
inputGraph->addPrior(0, posesInFile->at<Pose3>(0), priorModel);

View File

@ -231,10 +231,8 @@ TEST( InitializePose3, orientationsGradient ) {
// writeG2o(pose3Graph, givenPoses, g2oFile);
const string matlabResultsfile = findExampleDataFile("simpleGraph10gradIter");
NonlinearFactorGraph::shared_ptr matlabGraph;
Values::shared_ptr matlabValues;
bool is3D = true;
std::tie(matlabGraph, matlabValues) = readG2o(matlabResultsfile, is3D);
const auto [matlabGraph, matlabValues] = readG2o(matlabResultsfile, is3D);
Rot3 R0Expected = matlabValues->at<Pose3>(1).rotation();
EXPECT(assert_equal(R0Expected, orientations.at<Rot3>(x0), 1e-4));
@ -266,10 +264,8 @@ TEST( InitializePose3, posesWithGivenGuess ) {
/* ************************************************************************* */
TEST(InitializePose3, initializePoses) {
const string g2oFile = findExampleDataFile("pose3example-grid");
NonlinearFactorGraph::shared_ptr inputGraph;
Values::shared_ptr posesInFile;
bool is3D = true;
std::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D);
const auto [inputGraph, posesInFile] = readG2o(g2oFile, is3D);
auto priorModel = noiseModel::Unit::Create(6);
inputGraph->addPrior(0, Pose3(), priorModel);

View File

@ -258,9 +258,7 @@ TEST( Lago, smallGraph2 ) {
TEST( Lago, largeGraphNoisy_orientations ) {
string inputFile = findExampleDataFile("noisyToyGraph");
NonlinearFactorGraph::shared_ptr g;
Values::shared_ptr initial;
std::tie(g, initial) = readG2o(inputFile);
const auto [g, initial] = readG2o(inputFile);
// Add prior on the pose having index (key) = 0
NonlinearFactorGraph graphWithPrior = *g;
@ -279,9 +277,7 @@ TEST( Lago, largeGraphNoisy_orientations ) {
}
}
string matlabFile = findExampleDataFile("orientationsNoisyToyGraph");
NonlinearFactorGraph::shared_ptr gmatlab;
Values::shared_ptr expected;
std::tie(gmatlab, expected) = readG2o(matlabFile);
const auto [gmatlab, expected] = readG2o(matlabFile);
for(const auto& key_pose: expected->extract<Pose2>()){
const Key& k = key_pose.first;
@ -294,9 +290,7 @@ TEST( Lago, largeGraphNoisy_orientations ) {
TEST( Lago, largeGraphNoisy ) {
string inputFile = findExampleDataFile("noisyToyGraph");
NonlinearFactorGraph::shared_ptr g;
Values::shared_ptr initial;
std::tie(g, initial) = readG2o(inputFile);
const auto [g, initial] = readG2o(inputFile);
// Add prior on the pose having index (key) = 0
NonlinearFactorGraph graphWithPrior = *g;
@ -306,9 +300,7 @@ TEST( Lago, largeGraphNoisy ) {
Values actual = lago::initialize(graphWithPrior);
string matlabFile = findExampleDataFile("optimizedNoisyToyGraph");
NonlinearFactorGraph::shared_ptr gmatlab;
Values::shared_ptr expected;
std::tie(gmatlab, expected) = readG2o(matlabFile);
const auto [gmatlab, expected] = readG2o(matlabFile);
for(const auto& key_pose: expected->extract<Pose2>()){
const Key& k = key_pose.first;

View File

@ -61,9 +61,10 @@ namespace gtsam
std::set_difference(allKeys.begin(), allKeys.end(), frontals.begin(), frontals.end(), orderedKeys.begin() + nFrontals);
// Return resulting conditional and factor
return std::make_pair(
return {
SymbolicConditional::FromKeysShared(orderedKeys, nFrontals),
SymbolicFactor::FromIteratorsShared(orderedKeys.begin() + nFrontals, orderedKeys.end()));
SymbolicFactor::FromIteratorsShared(orderedKeys.begin() + nFrontals, orderedKeys.end())
};
}
}
}

View File

@ -74,9 +74,7 @@ TEST(SymbolicFactor, EliminateSymbolic)
const SymbolicConditional expectedConditional =
SymbolicConditional::FromKeys(KeyVector{0,1,2,3,4,5,6}, 4);
SymbolicFactor::shared_ptr actualFactor;
SymbolicConditional::shared_ptr actualConditional;
std::tie(actualConditional, actualFactor) =
const auto [actualConditional, actualFactor] =
EliminateSymbolic(factors, Ordering{0, 1, 2, 3});
CHECK(assert_equal(expectedConditional, *actualConditional));

View File

@ -65,17 +65,13 @@ TEST(SymbolicFactorGraph, eliminatePartialSequential) {
const auto expectedSfg = SymbolicFactorGraph(SymbolicFactor(2, 3))(
SymbolicFactor(4, 5))(SymbolicFactor(2, 3, 4));
SymbolicBayesNet::shared_ptr actualBayesNet;
SymbolicFactorGraph::shared_ptr actualSfg;
std::tie(actualBayesNet, actualSfg) =
const auto [actualBayesNet, actualSfg] =
simpleTestGraph2.eliminatePartialSequential(Ordering{0, 1});
EXPECT(assert_equal(expectedSfg, *actualSfg));
EXPECT(assert_equal(expectedBayesNet, *actualBayesNet));
SymbolicBayesNet::shared_ptr actualBayesNet2;
SymbolicFactorGraph::shared_ptr actualSfg2;
std::tie(actualBayesNet2, actualSfg2) =
const auto [actualBayesNet2, actualSfg2] =
simpleTestGraph2.eliminatePartialSequential(Ordering{0, 1});
EXPECT(assert_equal(expectedSfg, *actualSfg2));
@ -106,9 +102,7 @@ TEST(SymbolicFactorGraph, eliminatePartialMultifrontal) {
SymbolicFactorGraph(SymbolicFactor(0, 1))(SymbolicFactor(0, 2))(
SymbolicFactor(1, 3))(SymbolicFactor(2, 3))(SymbolicFactor(1));
SymbolicBayesTree::shared_ptr actualBayesTree;
SymbolicFactorGraph::shared_ptr actualFactorGraph;
std::tie(actualBayesTree, actualFactorGraph) =
const auto [actualBayesTree, actualFactorGraph] =
simpleTestGraph2.eliminatePartialMultifrontal(Ordering{4, 5});
EXPECT(assert_equal(expectedFactorGraph, *actualFactorGraph));
@ -122,9 +116,7 @@ TEST(SymbolicFactorGraph, eliminatePartialMultifrontal) {
std::make_shared<SymbolicConditional>(5, 4)));
expectedBayesTree2.insertRoot(root2);
SymbolicBayesTree::shared_ptr actualBayesTree2;
SymbolicFactorGraph::shared_ptr actualFactorGraph2;
std::tie(actualBayesTree2, actualFactorGraph2) =
const auto [actualBayesTree2, actualFactorGraph2] =
simpleTestGraph2.eliminatePartialMultifrontal(KeyVector{4, 5});
EXPECT(assert_equal(expectedFactorGraph, *actualFactorGraph2));
@ -152,11 +144,11 @@ TEST(SymbolicFactorGraph, eliminate_disconnected_graph) {
// create expected Chordal bayes Net
SymbolicBayesNet expected;
expected.push_back(std::make_shared<SymbolicConditional>(0, 1, 2));
expected.push_back(std::make_shared<SymbolicConditional>(1, 2));
expected.push_back(std::make_shared<SymbolicConditional>(2));
expected.push_back(std::make_shared<SymbolicConditional>(3, 4));
expected.push_back(std::make_shared<SymbolicConditional>(4));
expected.emplace_shared<SymbolicConditional>(0, 1, 2);
expected.emplace_shared<SymbolicConditional>(1, 2);
expected.emplace_shared<SymbolicConditional>(2);
expected.emplace_shared<SymbolicConditional>(3, 4);
expected.emplace_shared<SymbolicConditional>(4);
Ordering order;
order += 0, 1, 2, 3, 4;
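emplace_shared, used above instead of push_back(std::make_shared<...>(...)), is a convenience member of GTSAM's FactorGraph types; a generic helper with the same shape might look like this sketch (Container and Conditional are illustrative stand-ins):

#include <memory>
#include <utility>
#include <vector>

// Illustrative container of shared pointers with an emplace_shared helper,
// mimicking the convenience method used in the test above.
template <typename Base>
struct Container {
  std::vector<std::shared_ptr<Base>> items;

  template <typename Derived, typename... Args>
  void emplace_shared(Args&&... args) {
    items.push_back(std::make_shared<Derived>(std::forward<Args>(args)...));
  }
};

struct Conditional {
  int frontal;
  explicit Conditional(int f) : frontal(f) {}
};

int main() {
  Container<Conditional> expected;
  expected.emplace_shared<Conditional>(0);  // one call, no make_shared at the call site
  expected.emplace_shared<Conditional>(4);
  return expected.items.size() == 2 ? 0 : 1;
}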

View File

@ -178,9 +178,6 @@ void solveStaged(size_t addMutex = 2) {
gttoc_(eliminate);
// find root node
// chordal->back()->print("back: ");
// chordal->front()->print("front: ");
// exit(0);
DiscreteConditional::shared_ptr root = chordal->back();
if (debug)
root->print(""/*scheduler.studentName(s)*/);
@ -211,7 +208,6 @@ DiscreteBayesNet::shared_ptr createSampler(size_t i,
SETDEBUG("Scheduler::buildGraph", false);
scheduler.addStudentSpecificConstraints(0, slot);
DiscreteBayesNet::shared_ptr chordal = scheduler.eliminate();
// chordal->print(scheduler[i].studentKey(0).name()); // large !
schedulers.push_back(scheduler);
return chordal;
}

View File

@ -79,8 +79,6 @@ class LoopyBelief {
DiscreteFactorGraph::shared_ptr iterate(
const std::map<Key, DiscreteKey>& allDiscreteKeys) {
static const bool debug = false;
static DiscreteConditional::shared_ptr
dummyCond; // unused by-product of elimination
DiscreteFactorGraph::shared_ptr beliefs(new DiscreteFactorGraph());
std::map<Key, std::map<Key, DiscreteFactor::shared_ptr> > allMessages;
// Eliminate each star graph
@ -99,8 +97,7 @@ class LoopyBelief {
subGraph.push_back(starGraphs_.at(key).star->at(factor));
}
if (debug) subGraph.print("------- Subgraph:");
DiscreteFactor::shared_ptr message;
std::tie(dummyCond, message) =
const auto [dummyCond, message] =
EliminateDiscrete(subGraph, Ordering{neighbor});
// store the new factor into messages
messages.insert(make_pair(neighbor, message));
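One caveat of this conversion: std::tie can drop an unwanted result with std::ignore, while a structured binding must name every element, which is why the unused conditional keeps the dummyCond name. A sketch of both options, with a hypothetical eliminate() standing in for EliminateDiscrete:

#include <memory>
#include <string>
#include <tuple>
#include <utility>

// Hypothetical stand-in for elimination returning a conditional and a factor.
std::pair<std::shared_ptr<std::string>, std::shared_ptr<std::string>> eliminate() {
  return {std::make_shared<std::string>("conditional"),
          std::make_shared<std::string>("factor")};
}

int main() {
  // With std::tie, an element can be discarded outright.
  std::shared_ptr<std::string> message1;
  std::tie(std::ignore, message1) = eliminate();

  // With a structured binding, every element gets a name; silence the
  // unused-variable warning explicitly if needed.
  const auto [dummyCond, message2] = eliminate();
  (void)dummyCond;

  return (*message1 == *message2) ? 0 : 1;
}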

View File

@ -130,9 +130,7 @@ TEST(InvDepthFactor, backproject)
InvDepthCamera3<Cal3_S2> inv_camera(level_pose,K);
Point2 z = inv_camera.project(expected, inv_depth);
Vector5 actual_vec;
double actual_inv;
std::tie(actual_vec, actual_inv) = inv_camera.backproject(z, 4);
const auto [actual_vec, actual_inv] = inv_camera.backproject(z, 4);
EXPECT(assert_equal(expected,actual_vec,1e-7));
EXPECT_DOUBLES_EQUAL(inv_depth,actual_inv,1e-7);
}
@ -146,9 +144,7 @@ TEST(InvDepthFactor, backproject2)
InvDepthCamera3<Cal3_S2> inv_camera(Pose3(Rot3::Ypr(1.5,0.1, -1.5), Point3(-5, -5, 2)),K);
Point2 z = inv_camera.project(expected, inv_depth);
Vector5 actual_vec;
double actual_inv;
std::tie(actual_vec, actual_inv) = inv_camera.backproject(z, 10);
const auto [actual_vec, actual_inv] = inv_camera.backproject(z, 10);
EXPECT(assert_equal(expected,actual_vec,1e-7));
EXPECT_DOUBLES_EQUAL(inv_depth,actual_inv,1e-7);
}

View File

@ -219,10 +219,8 @@ Template typename This::State This::iterate(
} else {
// If we CAN make some progress, i.e. p_k != 0
// Adapt stepsize if some inactive constraints complain about this move
double alpha;
int factorIx;
VectorValues p = newValues - state.values;
std::tie(alpha, factorIx) = // using 16.41
const auto [alpha, factorIx] = // using 16.41
computeStepSize(state.workingSet, state.values, p, POLICY::maxAlpha);
// also add to the working set the one that complains the most
InequalityFactorGraph newWorkingSet = state.workingSet;

View File

@ -69,8 +69,7 @@ TEST(LPInitSolver, InfiniteLoopSingleVar) {
LPSolver solver(lp);
VectorValues starter;
starter.insert(1, Vector3(0, 0, 2));
VectorValues results, duals;
std::tie(results, duals) = solver.optimize(starter);
const auto [results, duals] = solver.optimize(starter);
VectorValues expected;
expected.insert(1, Vector3(13.5, 6.5, -6.5));
CHECK(assert_equal(results, expected, 1e-7));
@ -97,8 +96,7 @@ TEST(LPInitSolver, InfiniteLoopMultiVar) {
starter.insert(X, kZero);
starter.insert(Y, kZero);
starter.insert(Z, Vector::Constant(1, 2.0));
VectorValues results, duals;
std::tie(results, duals) = solver.optimize(starter);
const auto [results, duals] = solver.optimize(starter);
VectorValues expected;
expected.insert(X, Vector::Constant(1, 13.5));
expected.insert(Y, Vector::Constant(1, 6.5));
@ -197,8 +195,7 @@ TEST(LPSolver, SimpleTest1) {
expected_x1.insert(1, Vector::Ones(2));
CHECK(assert_equal(expected_x1, x1, 1e-10));
VectorValues result, duals;
std::tie(result, duals) = lpSolver.optimize(init);
const auto [result, duals] = lpSolver.optimize(init);
VectorValues expectedResult;
expectedResult.insert(1, Vector2(8. / 3., 2. / 3.));
CHECK(assert_equal(expectedResult, result, 1e-10));
@ -208,9 +205,9 @@ TEST(LPSolver, SimpleTest1) {
TEST(LPSolver, TestWithoutInitialValues) {
LP lp = simpleLP1();
LPSolver lpSolver(lp);
VectorValues result, duals, expectedResult;
VectorValues expectedResult;
expectedResult.insert(1, Vector2(8. / 3., 2. / 3.));
std::tie(result, duals) = lpSolver.optimize();
const auto [result, duals] = lpSolver.optimize();
CHECK(assert_equal(expectedResult, result));
}

View File

@ -200,9 +200,7 @@ TEST(LinearEquality, operators) {
EXPECT(assert_equal(expectedX, actualX));
// test gradient at zero
Matrix A;
Vector b2;
std::tie(A, b2) = lf.jacobian();
const auto [A, b2] = lf.jacobian();
VectorValues expectedG;
expectedG.insert(1, (Vector(2) << 0.2, -0.1).finished());
expectedG.insert(2, (Vector(2) << -0.2, 0.1).finished());

View File

@ -584,10 +584,10 @@ TEST( ConcurrentIncrementalSmootherDL, synchronize_3 )
allkeys.erase(key);
}
KeyVector variables(allkeys.begin(), allkeys.end());
std::pair<GaussianBayesNet::shared_ptr, GaussianFactorGraph::shared_ptr> result = LinFactorGraph->eliminatePartialSequential(variables, EliminateCholesky);
const auto [bn, fg] = LinFactorGraph->eliminatePartialSequential(variables, EliminateCholesky);
expectedSmootherSummarization.resize(0);
for(const GaussianFactor::shared_ptr& factor: *result.second) {
for(const GaussianFactor::shared_ptr& factor: *fg) {
expectedSmootherSummarization.push_back(LinearContainerFactor(factor, allValues));
}

View File

@ -584,10 +584,10 @@ TEST( ConcurrentIncrementalSmootherGN, synchronize_3 )
KeySet allkeys = LinFactorGraph->keys();
for (const auto key : filterSeparatorValues.keys()) allkeys.erase(key);
KeyVector variables(allkeys.begin(), allkeys.end());
std::pair<GaussianBayesNet::shared_ptr, GaussianFactorGraph::shared_ptr> result = LinFactorGraph->eliminatePartialSequential(variables, EliminateCholesky);
const auto [bn, fg] = LinFactorGraph->eliminatePartialSequential(variables, EliminateCholesky);
expectedSmootherSummarization.resize(0);
for(const GaussianFactor::shared_ptr& factor: *result.second) {
for(const GaussianFactor::shared_ptr& factor: *fg) {
expectedSmootherSummarization.push_back(LinearContainerFactor(factor, allValues));
}

View File

@ -86,8 +86,8 @@ TEST(NonlinearClusterTree, Clusters) {
// Calculate expected result of only evaluating the marginalCluster
Ordering ordering;
ordering.push_back(x1);
auto expected = gfg->eliminatePartialSequential(ordering);
auto expectedFactor = std::dynamic_pointer_cast<HessianFactor>(expected.second->at(0));
const auto [bn, fg] = gfg->eliminatePartialSequential(ordering);
auto expectedFactor = std::dynamic_pointer_cast<HessianFactor>(fg->at(0));
if (!expectedFactor)
throw std::runtime_error("Expected HessianFactor");
@ -95,7 +95,7 @@ TEST(NonlinearClusterTree, Clusters) {
auto actual = marginalCluster->linearizeAndEliminate(initial);
const GaussianBayesNet& bayesNet = actual.first;
const HessianFactor& factor = *actual.second;
EXPECT(assert_equal(*expected.first->at(0), *bayesNet.at(0), 1e-6));
EXPECT(assert_equal(*bn->at(0), *bayesNet.at(0), 1e-6));
EXPECT(assert_equal(*expectedFactor, factor, 1e-6));
}

View File

@ -249,9 +249,7 @@ namespace gtsam { namespace partition {
prepareMetisGraph<GenericGraph>(graph, keys, workspace, &xadj, &adjncy, &adjwgt);
// run ND on the graph
size_t sepsize;
sharedInts part;
std::tie(sepsize, part) = separatorMetis(numKeys, xadj, adjncy, adjwgt, verbose);
const auto [sepsize, part] = separatorMetis(numKeys, xadj, adjncy, adjwgt, verbose);
if (!sepsize) return std::optional<MetisResult>();
// convert the 0-1-2 from Metis to 1-2-0, so that the separator is 0, as later
@ -309,9 +307,7 @@ namespace gtsam { namespace partition {
prepareMetisGraph<GenericGraph>(graph, keys, workspace, &xadj, &adjncy, &adjwgt);
// run metis on the graph
int edgecut;
sharedInts part;
std::tie(edgecut, part) = edgeMetis(numKeys, xadj, adjncy, adjwgt, verbose);
const auto [edgecut, part] = edgeMetis(numKeys, xadj, adjncy, adjwgt, verbose);
// convert the 0-1-2 from Metis to 1-2-0, so that the separator is 0, as later we will have more submaps
MetisResult result;

View File

@ -20,9 +20,7 @@ namespace gtsam { namespace partition {
NestedDissection<NLG, SubNLG, GenericGraph>::NestedDissection(
const NLG& fg, const Ordering& ordering, const int numNodeStopPartition, const int minNodesPerMap, const bool verbose) :
fg_(fg), ordering_(ordering){
GenericUnaryGraph unaryFactors;
GenericGraph gfg;
std::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering);
const auto [unaryFactors, gfg] = fg.createGenericGraph(ordering);
// build reverse mapping from integer to symbol
int numNodes = ordering.size();
@ -44,9 +42,7 @@ namespace gtsam { namespace partition {
template <class NLG, class SubNLG, class GenericGraph>
NestedDissection<NLG, SubNLG, GenericGraph>::NestedDissection(
const NLG& fg, const Ordering& ordering, const std::shared_ptr<Cuts>& cuts, const bool verbose) : fg_(fg), ordering_(ordering){
GenericUnaryGraph unaryFactors;
GenericGraph gfg;
std::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering);
const auto [unaryFactors, gfg] = fg.createGenericGraph(ordering);
// build reverse mapping from integer to symbol
int numNodes = ordering.size();

View File

@ -70,17 +70,15 @@ TEST (AHRS, constructor) {
/* ************************************************************************* */
// TODO make a testMechanization_bRn2
TEST (AHRS, Mechanization_integrate) {
AHRS ahrs = AHRS(stationaryU,stationaryF,g_e);
Mechanization_bRn2 mech;
KalmanFilter::State state;
// std::tie(mech,state) = ahrs.initialize(g_e);
// Vector u = Vector3(0.05,0.0,0.0);
// double dt = 2;
// Rot3 expected;
// Mechanization_bRn2 mech2 = mech.integrate(u,dt);
// Rot3 actual = mech2.bRn();
// EXPECT(assert_equal(expected, actual));
TEST(AHRS, Mechanization_integrate) {
AHRS ahrs = AHRS(stationaryU, stationaryF, g_e);
// const auto [mech, state] = ahrs.initialize(g_e);
// Vector u = Vector3(0.05, 0.0, 0.0);
// double dt = 2;
// Rot3 expected;
// Mechanization_bRn2 mech2 = mech.integrate(u, dt);
// Rot3 actual = mech2.bRn();
// EXPECT(assert_equal(expected, actual));
}
/* ************************************************************************* */

View File

@ -118,10 +118,8 @@ TEST( InvDepthFactor, Jacobian3D ) {
Point2 expected_uv = level_camera.project(landmark);
// get expected landmark representation using backprojection
double inv_depth;
Vector5 inv_landmark;
InvDepthCamera3<Cal3_S2> inv_camera(level_pose, K);
std::tie(inv_landmark, inv_depth) = inv_camera.backproject(expected_uv, 5);
const auto [inv_landmark, inv_depth] = inv_camera.backproject(expected_uv, 5);
Vector5 expected_inv_landmark((Vector(5) << 0., 0., 1., 0., 0.).finished());
CHECK(assert_equal(expected_inv_landmark, inv_landmark, 1e-6));

View File

@ -463,10 +463,7 @@ inline std::pair<NonlinearFactorGraph, Values> createNonlinearSmoother(int T) {
/* ************************************************************************* */
inline GaussianFactorGraph createSmoother(int T) {
NonlinearFactorGraph nlfg;
Values poses;
std::tie(nlfg, poses) = createNonlinearSmoother(T);
const auto [nlfg, poses] = createNonlinearSmoother(T);
return *nlfg.linearize(poses);
}

View File

@ -233,9 +233,7 @@ TEST(ExpressionFactor, Shallow) {
Point2_ expression = project(transformTo(x_, p_));
// Get and check keys and dims
KeyVector keys;
FastVector<int> dims;
std::tie(keys, dims) = expression.keysAndDims();
const auto [keys, dims] = expression.keysAndDims();
LONGS_EQUAL(2,keys.size());
LONGS_EQUAL(2,dims.size());
LONGS_EQUAL(1,keys[0]);

View File

@ -111,9 +111,7 @@ TEST(GaussianFactorGraph, eliminateOne_l1) {
/* ************************************************************************* */
TEST(GaussianFactorGraph, eliminateOne_x1_fast) {
GaussianFactorGraph fg = createGaussianFactorGraph();
GaussianConditional::shared_ptr conditional;
JacobianFactor::shared_ptr remaining;
std::tie(conditional, remaining) = EliminateQR(fg, Ordering{X(1)});
const auto [conditional, remaining] = EliminateQR(fg, Ordering{X(1)});
// create expected Conditional Gaussian
Matrix I = 15 * I_2x2, R11 = I, S12 = -0.111111 * I, S13 = -0.444444 * I;
@ -289,9 +287,7 @@ TEST(GaussianFactorGraph, elimination) {
GaussianBayesNet bayesNet = *fg.eliminateSequential();
// Check matrix
Matrix R;
Vector d;
std::tie(R, d) = bayesNet.matrix();
const auto [R, d] = bayesNet.matrix();
Matrix expected =
(Matrix(2, 2) << 0.707107, -0.353553, 0.0, 0.612372).finished();
Matrix expected2 =

View File

@ -246,7 +246,6 @@ bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, c
// Check gradient at each node
bool nodeGradientsOk = true;
typedef ISAM2::sharedClique sharedClique;
for (const auto& [key, clique] : isam.nodes()) {
// Compute expected gradient
GaussianFactorGraph jfg;
@ -450,7 +449,6 @@ TEST(ISAM2, swapFactors)
EXPECT(isam_check(fullgraph, fullinit, isam, *this, result_));
// Check gradient at each node
typedef ISAM2::sharedClique sharedClique;
for (const auto& [key, clique]: isam.nodes()) {
// Compute expected gradient
GaussianFactorGraph jfg;
@ -575,7 +573,6 @@ TEST(ISAM2, constrained_ordering)
EXPECT(isam_check(fullgraph, fullinit, isam, *this, result_));
// Check gradient at each node
typedef ISAM2::sharedClique sharedClique;
for (const auto& [key, clique]: isam.nodes()) {
// Compute expected gradient
GaussianFactorGraph jfg;

View File

@ -65,9 +65,7 @@ using symbol_shorthand::L;
*/
TEST( GaussianJunctionTreeB, constructor2 ) {
// create a graph
NonlinearFactorGraph nlfg;
Values values;
std::tie(nlfg, values) = createNonlinearSmoother(7);
const auto [nlfg, values] = createNonlinearSmoother(7);
SymbolicFactorGraph::shared_ptr symbolic = nlfg.symbolic();
// linearize
@ -125,43 +123,35 @@ TEST( GaussianJunctionTreeB, constructor2 ) {
}
///* ************************************************************************* */
//TEST( GaussianJunctionTreeB, optimizeMultiFrontal )
//{
// // create a graph
// GaussianFactorGraph fg;
// Ordering ordering;
// std::tie(fg,ordering) = createSmoother(7);
//
// // optimize the graph
// GaussianJunctionTree tree(fg);
// VectorValues actual = tree.optimize(&EliminateQR);
//
// // verify
// VectorValues expected(vector<size_t>(7,2)); // expected solution
// Vector v = Vector2(0., 0.);
// for (int i=1; i<=7; i++)
// expected[ordering[X(i)]] = v;
// EXPECT(assert_equal(expected,actual));
//}
//
///* ************************************************************************* */
//TEST( GaussianJunctionTreeB, optimizeMultiFrontal2)
//{
// // create a graph
// example::Graph nlfg = createNonlinearFactorGraph();
// Values noisy = createNoisyValues();
// Ordering ordering; ordering += X(1),X(2),L(1);
// GaussianFactorGraph fg = *nlfg.linearize(noisy, ordering);
//
// // optimize the graph
// GaussianJunctionTree tree(fg);
// VectorValues actual = tree.optimize(&EliminateQR);
//
// // verify
// VectorValues expected = createCorrectDelta(ordering); // expected solution
// EXPECT(assert_equal(expected,actual));
//}
//
TEST(GaussianJunctionTreeB, OptimizeMultiFrontal) {
// create a graph
const auto fg = createSmoother(7);
// optimize the graph
const VectorValues actual = fg.optimize(&EliminateQR);
// verify
VectorValues expected;
const Vector v = Vector2(0., 0.);
for (int i = 1; i <= 7; i++) expected.emplace(X(i), v);
EXPECT(assert_equal(expected, actual));
}
/* ************************************************************************* */
TEST(GaussianJunctionTreeB, optimizeMultiFrontal2) {
// create a graph
const auto nlfg = createNonlinearFactorGraph();
const auto noisy = createNoisyValues();
const auto fg = *nlfg.linearize(noisy);
// optimize the graph
VectorValues actual = fg.optimize(&EliminateQR);
// verify
VectorValues expected = createCorrectDelta(); // expected solution
EXPECT(assert_equal(expected, actual));
}
///* ************************************************************************* */
//TEST(GaussianJunctionTreeB, slamlike) {
// Values init;

View File

@ -737,9 +737,7 @@ TEST(GncOptimizer, setInlierCostThresholds) {
TEST(GncOptimizer, optimizeSmallPoseGraph) {
/// load small pose graph
const string filename = findExampleDataFile("w100.graph");
NonlinearFactorGraph::shared_ptr graph;
Values::shared_ptr initial;
std::tie(graph, initial) = load2D(filename);
const auto [graph, initial] = load2D(filename);
// Add a Gaussian prior on first poses
Pose2 priorMean(0.0, 0.0, 0.0); // prior at origin
SharedDiagonal priorNoise = noiseModel::Diagonal::Sigmas(

View File

@ -63,16 +63,12 @@ std::tuple<NonlinearFactorGraph, Values> generateProblem() {
Pose2 x5(2.1, 2.1, -M_PI_2);
initialEstimate.insert(5, x5);
return std::tie(graph, initialEstimate);
return {graph, initialEstimate};
}
/* ************************************************************************* */
TEST(NonlinearConjugateGradientOptimizer, Optimize) {
NonlinearFactorGraph graph;
Values initialEstimate;
std::tie(graph, initialEstimate) = generateProblem();
const auto [graph, initialEstimate] = generateProblem();
// cout << "initial error = " << graph.error(initialEstimate) << endl;
NonlinearOptimizerParams param;

View File

@ -59,10 +59,8 @@ TEST( Iterative, conjugateGradientDescent )
VectorValues expected = fg.optimize();
// get matrices
Matrix A;
Vector b;
Vector x0 = Z_6x1;
std::tie(A, b) = fg.jacobian();
const auto [A, b] = fg.jacobian();
Vector expectedX = (Vector(6) << -0.1, 0.1, -0.1, -0.1, 0.1, -0.2).finished();
// Do conjugate gradient descent, System version

View File

@ -62,9 +62,7 @@ static double error(const GaussianFactorGraph& fg, const VectorValues& x) {
/* ************************************************************************* */
TEST(SubgraphPreconditioner, planarGraph) {
// Check planar graph construction
GaussianFactorGraph A;
VectorValues xtrue;
std::tie(A, xtrue) = planarGraph(3);
const auto [A, xtrue] = planarGraph(3);
LONGS_EQUAL(13, A.size());
LONGS_EQUAL(9, xtrue.size());
DOUBLES_EQUAL(0, error(A, xtrue), 1e-9); // check zero error for xtrue
@ -78,13 +76,10 @@ TEST(SubgraphPreconditioner, planarGraph) {
/* ************************************************************************* */
TEST(SubgraphPreconditioner, splitOffPlanarTree) {
// Build a planar graph
GaussianFactorGraph A;
VectorValues xtrue;
std::tie(A, xtrue) = planarGraph(3);
const auto [A, xtrue] = planarGraph(3);
// Get the spanning tree and constraints, and check their sizes
GaussianFactorGraph T, C;
std::tie(T, C) = splitOffPlanarTree(3, A);
const auto [T, C] = splitOffPlanarTree(3, A);
LONGS_EQUAL(9, T.size());
LONGS_EQUAL(4, C.size());
@ -97,14 +92,11 @@ TEST(SubgraphPreconditioner, splitOffPlanarTree) {
/* ************************************************************************* */
TEST(SubgraphPreconditioner, system) {
// Build a planar graph
GaussianFactorGraph Ab;
VectorValues xtrue;
size_t N = 3;
std::tie(Ab, xtrue) = planarGraph(N); // A*x-b
const auto [Ab, xtrue] = planarGraph(N); // A*x-b
// Get the spanning tree and remaining graph
GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2
std::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab);
auto [Ab1, Ab2] = splitOffPlanarTree(N, Ab);
// Eliminate the spanning tree to build a prior
const Ordering ord = planarOrdering(N);
@ -120,11 +112,9 @@ TEST(SubgraphPreconditioner, system) {
Ab2.add(key(1, 1), Z_2x2, Z_2x1);
Ab2.add(key(1, 2), Z_2x2, Z_2x1);
Ab2.add(key(1, 3), Z_2x2, Z_2x1);
Matrix A, A1, A2;
Vector b, b1, b2;
std::tie(A, b) = Ab.jacobian(ordering);
std::tie(A1, b1) = Ab1.jacobian(ordering);
std::tie(A2, b2) = Ab2.jacobian(ordering);
const auto [A, b] = Ab.jacobian(ordering);
const auto [A1, b1] = Ab1.jacobian(ordering);
const auto [A2, b2] = Ab2.jacobian(ordering);
Matrix R1 = Rc1.matrix(ordering).first;
Matrix Abar(13 * 2, 9 * 2);
Abar.topRows(9 * 2) = Matrix::Identity(9 * 2, 9 * 2);
@ -193,14 +183,11 @@ TEST(SubgraphPreconditioner, system) {
/* ************************************************************************* */
TEST(SubgraphPreconditioner, conjugateGradients) {
// Build a planar graph
GaussianFactorGraph Ab;
VectorValues xtrue;
size_t N = 3;
std::tie(Ab, xtrue) = planarGraph(N); // A*x-b
const auto [Ab, xtrue] = planarGraph(N); // A*x-b
// Get the spanning tree
GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2
std::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab);
const auto [Ab1, Ab2] = splitOffPlanarTree(N, Ab);
// Eliminate the spanning tree to build a prior
GaussianBayesNet Rc1 = *Ab1.eliminateSequential(); // R1*x-c1

View File

@ -55,9 +55,7 @@ TEST( SubgraphSolver, Parameters )
TEST( SubgraphSolver, splitFactorGraph )
{
// Build a planar graph
GaussianFactorGraph Ab;
VectorValues xtrue;
std::tie(Ab, xtrue) = example::planarGraph(N); // A*x-b
const auto [Ab, xtrue] = example::planarGraph(N); // A*x-b
SubgraphBuilderParameters params;
params.augmentationFactor = 0.0;
@ -65,8 +63,7 @@ TEST( SubgraphSolver, splitFactorGraph )
auto subgraph = builder(Ab);
EXPECT_LONGS_EQUAL(9, subgraph.size());
GaussianFactorGraph Ab1, Ab2;
std::tie(Ab1, Ab2) = splitFactorGraph(Ab, subgraph);
const auto [Ab1, Ab2] = splitFactorGraph(Ab, subgraph);
EXPECT_LONGS_EQUAL(9, Ab1.size());
EXPECT_LONGS_EQUAL(13, Ab2.size());
}
@ -75,9 +72,7 @@ TEST( SubgraphSolver, splitFactorGraph )
TEST( SubgraphSolver, constructor1 )
{
// Build a planar graph
GaussianFactorGraph Ab;
VectorValues xtrue;
std::tie(Ab, xtrue) = example::planarGraph(N); // A*x-b
const auto [Ab, xtrue] = example::planarGraph(N); // A*x-b
// The first constructor just takes a factor graph (and kParameters)
// and it will split the graph into A1 and A2, where A1 is a spanning tree
@ -90,14 +85,11 @@ TEST( SubgraphSolver, constructor1 )
TEST( SubgraphSolver, constructor2 )
{
// Build a planar graph
GaussianFactorGraph Ab;
VectorValues xtrue;
size_t N = 3;
std::tie(Ab, xtrue) = example::planarGraph(N); // A*x-b
const auto [Ab, xtrue] = example::planarGraph(N); // A*x-b
// Get the spanning tree
GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2
std::tie(Ab1, Ab2) = example::splitOffPlanarTree(N, Ab);
// Get the spanning tree, A1*x-b1 and A2*x-b2
const auto [Ab1, Ab2] = example::splitOffPlanarTree(N, Ab);
// The second constructor takes two factor graphs, so the caller can specify
// the preconditioner (Ab1) and the constraints that are left out (Ab2)
@ -110,14 +102,12 @@ TEST( SubgraphSolver, constructor2 )
TEST( SubgraphSolver, constructor3 )
{
// Build a planar graph
GaussianFactorGraph Ab;
VectorValues xtrue;
size_t N = 3;
std::tie(Ab, xtrue) = example::planarGraph(N); // A*x-b
const auto [Ab, xtrue] = example::planarGraph(N); // A*x-b
// Get the spanning tree and corresponding kOrdering
GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2
std::tie(Ab1, Ab2) = example::splitOffPlanarTree(N, Ab);
// A1*x-b1 and A2*x-b2
const auto [Ab1, Ab2] = example::splitOffPlanarTree(N, Ab);
// The caller solves |A1*x-b1|^2 == |R1*x-c1|^2, where R1 is square UT
auto Rc1 = *Ab1.eliminateSequential();

View File

@ -33,11 +33,9 @@ int main(int argc, char *argv[]) {
size_t trials = 1;
// read graph
Values::shared_ptr solution;
NonlinearFactorGraph::shared_ptr g;
string inputFile = findExampleDataFile("w10000");
auto model = noiseModel::Diagonal::Sigmas((Vector(3) << 0.05, 0.05, 5.0 * M_PI / 180.0).finished());
std::tie(g, solution) = load2D(inputFile, model);
const auto [g, solution] = load2D(inputFile, model);
// add noise to create initial estimate
Values initial;