Replace std::tie with C++17 structured bindings
parent 6c0cab25a3
commit ae7c17420d
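For context: std::tie unpacks a tuple or pair by assigning into variables that must already be declared, while a C++17 structured binding declares, deduces, and initializes the names in a single statement. A minimal, self-contained sketch of the two idioms — not GTSAM code; minmax is a made-up helper used only for illustration:

  #include <algorithm>
  #include <iostream>
  #include <tuple>
  #include <utility>

  // Hypothetical helper returning two values, like the GTSAM functions changed below.
  std::pair<int, int> minmax(int a, int b) {
    return {std::min(a, b), std::max(a, b)};
  }

  int main() {
    // Pre-C++17 idiom: declare first, then assign through a tuple of references.
    int lo, hi;
    std::tie(lo, hi) = minmax(3, 1);

    // C++17 structured binding: one declaration, names bound directly to the members.
    const auto [lo2, hi2] = minmax(3, 1);

    std::cout << lo << " " << hi << " " << lo2 << " " << hi2 << "\n";
    return 0;
  }

One practical difference visible in the hunks below: std::tie assigns into existing variables (and can discard a member via std::ignore), whereas a structured binding always introduces new names, so the pre-existing declarations are deleted and a few variables are renamed to avoid reuse (e.g. actualK2 in the Rot3 test, graph1/graph2 in the dataSet test).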
@@ -249,8 +249,7 @@ pair<Matrix,Matrix> qr(const Matrix& A) {
  xjm(k) = R(j+k, j);

  // calculate the Householder vector v
- double beta; Vector vjm;
- std::tie(beta,vjm) = house(xjm);
+ const auto [beta, vjm] = house(xjm);

  // pad with zeros to get m-dimensional vector v
  for(size_t k = 0 ; k < m; k++)

@@ -857,8 +857,7 @@ TEST(Matrix, qr )
  Matrix expectedR = (Matrix(6, 4) << 15, 0, -8.3333, 0, 00, 11.1803, 0, -2.2361, 00, 0,
  7.4536, 0, 00, 0, 0, 10.9545, 00, 0, 0, 0, 00, 0, 0, 0).finished();

- Matrix Q, R;
- std::tie(Q, R) = qr(A);
+ const auto [Q, R] = qr(A);
  EXPECT(assert_equal(expectedQ, Q, 1e-4));
  EXPECT(assert_equal(expectedR, R, 1e-4));
  EXPECT(assert_equal(A, Q*R, 1e-14));

@@ -915,10 +914,7 @@ TEST(Matrix, weighted_elimination )

  // unpack and verify
  size_t i = 0;
- for (const auto& tuple : solution) {
- Vector r;
- double di, sigma;
- std::tie(r, di, sigma) = tuple;
+ for (const auto& [r, di, sigma] : solution) {
  EXPECT(assert_equal(r, expectedR.row(i))); // verify r
  DOUBLES_EQUAL(d(i), di, 1e-8); // verify d
  DOUBLES_EQUAL(newSigmas(i), sigma, 1e-5); // verify sigma

@@ -1142,10 +1138,7 @@ TEST(Matrix, DLT )
  1.89, 2.24, 3.99, 3.24, 3.84, 6.84, 18.09, 21.44, 38.19,
  2.24, 2.48, 6.24, 3.08, 3.41, 8.58, 24.64, 27.28, 68.64
  ).finished();
- int rank;
- double error;
- Vector actual;
- std::tie(rank,error,actual) = DLT(A);
+ const auto [rank,error,actual] = DLT(A);
  Vector expected = (Vector(9) << -0.0, 0.2357, 0.4714, -0.2357, 0.0, - 0.4714,-0.4714, 0.4714, 0.0).finished();
  EXPECT_LONGS_EQUAL(8,rank);
  EXPECT_DOUBLES_EQUAL(0,error,1e-8);
@@ -154,8 +154,7 @@ TEST(Vector, weightedPseudoinverse )
  Vector weights = sigmas.array().square().inverse();

  // perform solve
- Vector actual; double precision;
- std::tie(actual, precision) = weightedPseudoinverse(x, weights);
+ const auto [actual, precision] = weightedPseudoinverse(x, weights);

  // construct expected
  Vector expected(2);

@@ -179,8 +178,7 @@ TEST(Vector, weightedPseudoinverse_constraint )
  sigmas(0) = 0.0; sigmas(1) = 0.2;
  Vector weights = sigmas.array().square().inverse();
  // perform solve
- Vector actual; double precision;
- std::tie(actual, precision) = weightedPseudoinverse(x, weights);
+ const auto [actual, precision] = weightedPseudoinverse(x, weights);

  // construct expected
  Vector expected(2);

@@ -197,8 +195,7 @@ TEST(Vector, weightedPseudoinverse_nan )
  Vector a = (Vector(4) << 1., 0., 0., 0.).finished();
  Vector sigmas = (Vector(4) << 0.1, 0.1, 0., 0.).finished();
  Vector weights = sigmas.array().square().inverse();
- Vector pseudo; double precision;
- std::tie(pseudo, precision) = weightedPseudoinverse(a, weights);
+ const auto [pseudo, precision] = weightedPseudoinverse(a, weights);

  Vector expected = (Vector(4) << 1., 0., 0.,0.).finished();
  EXPECT(assert_equal(expected, pseudo));
@@ -458,9 +458,7 @@ TEST(DecisionTree, unzip) {
  DTP tree(B, DTP(A, {0, "zero"}, {1, "one"}),
  DTP(A, {2, "two"}, {1337, "l33t"}));

- DT1 dt1;
- DT2 dt2;
- std::tie(dt1, dt2) = unzip(tree);
+ const auto [dt1, dt2] = unzip(tree);

  DT1 tree1(B, DT1(A, 0, 1), DT1(A, 2, 1337));
  DT2 tree2(B, DT2(A, "zero", "one"), DT2(A, "two", "l33t"));

@@ -109,9 +109,7 @@ TEST(DiscreteFactorGraph, test) {
  // Test EliminateDiscrete
  Ordering frontalKeys;
  frontalKeys += Key(0);
- DiscreteConditional::shared_ptr conditional;
- DecisionTreeFactor::shared_ptr newFactor;
- std::tie(conditional, newFactor) = EliminateDiscrete(graph, frontalKeys);
+ const auto [conditional, newFactor] = EliminateDiscrete(graph, frontalKeys);

  // Check Conditional
  CHECK(conditional);

@@ -128,9 +126,7 @@ TEST(DiscreteFactorGraph, test) {
  Ordering ordering;
  ordering += Key(0), Key(1), Key(2);
  DiscreteEliminationTree etree(graph, ordering);
- DiscreteBayesNet::shared_ptr actual;
- DiscreteFactorGraph::shared_ptr remainingGraph;
- std::tie(actual, remainingGraph) = etree.eliminate(&EliminateDiscrete);
+ const auto [actual, remainingGraph] = etree.eliminate(&EliminateDiscrete);

  // Check Bayes net
  DiscreteBayesNet expectedBayesNet;
@@ -31,9 +31,9 @@ namespace internal {
  static Point2Pairs SubtractCentroids(const Point2Pairs& abPointPairs,
  const Point2Pair& centroids) {
  Point2Pairs d_abPointPairs;
- for (const Point2Pair& abPair : abPointPairs) {
- Point2 da = abPair.first - centroids.first;
- Point2 db = abPair.second - centroids.second;
+ for (const auto& [a, b] : abPointPairs) {
+ Point2 da = a - centroids.first;
+ Point2 db = b - centroids.second;
  d_abPointPairs.emplace_back(da, db);
  }
  return d_abPointPairs;

@@ -43,10 +43,8 @@ static Point2Pairs SubtractCentroids(const Point2Pairs& abPointPairs,
  static double CalculateScale(const Point2Pairs& d_abPointPairs,
  const Rot2& aRb) {
  double x = 0, y = 0;
- Point2 da, db;

- for (const Point2Pair& d_abPair : d_abPointPairs) {
- std::tie(da, db) = d_abPair;
+ for (const auto& [da, db] : d_abPointPairs) {
  const Vector2 da_prime = aRb * db;
  y += da.transpose() * da_prime;
  x += da_prime.transpose() * da_prime;

@@ -58,8 +56,8 @@ static double CalculateScale(const Point2Pairs& d_abPointPairs,
  /// Form outer product H.
  static Matrix2 CalculateH(const Point2Pairs& d_abPointPairs) {
  Matrix2 H = Z_2x2;
- for (const Point2Pair& d_abPair : d_abPointPairs) {
- H += d_abPair.first * d_abPair.second.transpose();
+ for (const auto& [da, db] : d_abPointPairs) {
+ H += da * db.transpose();
  }
  return H;
  }

@@ -186,9 +184,7 @@ Similarity2 Similarity2::Align(const Pose2Pairs& abPosePairs) {
  abPointPairs.reserve(n);
  // Below denotes the pose of the i'th object/camera/etc
  // in frame "a" or frame "b".
- Pose2 aTi, bTi;
- for (const Pose2Pair& abPair : abPosePairs) {
- std::tie(aTi, bTi) = abPair;
+ for (const auto& [aTi, bTi] : abPosePairs) {
  const Rot2 aRb = aTi.rotation().compose(bTi.rotation().inverse());
  rotations.emplace_back(aRb);
  abPointPairs.emplace_back(aTi.translation(), bTi.translation());
@@ -31,9 +31,9 @@ namespace internal {
  static Point3Pairs subtractCentroids(const Point3Pairs &abPointPairs,
  const Point3Pair &centroids) {
  Point3Pairs d_abPointPairs;
- for (const Point3Pair& abPair : abPointPairs) {
- Point3 da = abPair.first - centroids.first;
- Point3 db = abPair.second - centroids.second;
+ for (const auto& [a, b] : abPointPairs) {
+ Point3 da = a - centroids.first;
+ Point3 db = b - centroids.second;
  d_abPointPairs.emplace_back(da, db);
  }
  return d_abPointPairs;

@@ -46,8 +46,7 @@ static double calculateScale(const Point3Pairs &d_abPointPairs,
  const Rot3 &aRb) {
  double x = 0, y = 0;
- Point3 da, db;
- for (const Point3Pair& d_abPair : d_abPointPairs) {
- std::tie(da, db) = d_abPair;
+ for (const auto& [da, db] : d_abPointPairs) {
  const Vector3 da_prime = aRb * db;
  y += da.transpose() * da_prime;
  x += da_prime.transpose() * da_prime;

@@ -59,8 +58,8 @@ static double calculateScale(const Point3Pairs &d_abPointPairs,
  /// Form outer product H.
  static Matrix3 calculateH(const Point3Pairs &d_abPointPairs) {
  Matrix3 H = Z_3x3;
- for (const Point3Pair& d_abPair : d_abPointPairs) {
- H += d_abPair.first * d_abPair.second.transpose();
+ for (const auto& [da, db] : d_abPointPairs) {
+ H += da * db.transpose();
  }
  return H;
  }

@@ -184,8 +183,7 @@ Similarity3 Similarity3::Align(const vector<Pose3Pair> &abPosePairs) {
  abPointPairs.reserve(n);
  // Below denotes the pose of the i'th object/camera/etc in frame "a" or frame "b"
- Pose3 aTi, bTi;
- for (const Pose3Pair &abPair : abPosePairs) {
- std::tie(aTi, bTi) = abPair;
+ for (const auto &[aTi, bTi] : abPosePairs) {
  const Rot3 aRb = aTi.rotation().compose(bTi.rotation().inverse());
  rotations.emplace_back(aRb);
  abPointPairs.emplace_back(aTi.translation(), bTi.translation());
@@ -128,10 +128,8 @@ TEST( Rot3, AxisAngle2)
  // constructor from a rotation matrix, as doubles in *row-major* order.
  Rot3 R1(-0.999957, 0.00922903, 0.00203116, 0.00926964, 0.999739, 0.0208927, -0.0018374, 0.0209105, -0.999781);

- Unit3 actualAxis;
- double actualAngle;
  // convert Rot3 to quaternion using GTSAM
- std::tie(actualAxis, actualAngle) = R1.axisAngle();
+ const auto [actualAxis, actualAngle] = R1.axisAngle();

  double expectedAngle = 3.1396582;
  CHECK(assert_equal(expectedAngle, actualAngle, 1e-5));

@@ -508,11 +506,9 @@ TEST( Rot3, yaw_pitch_roll )
  TEST( Rot3, RQ)
  {
  // Try RQ on a pure rotation
- Matrix actualK;
- Vector actual;
- std::tie(actualK, actual) = RQ(R.matrix());
+ const auto [actualK, actual] = RQ(R.matrix());
  Vector expected = Vector3(0.14715, 0.385821, 0.231671);
- CHECK(assert_equal(I_3x3,actualK));
+ CHECK(assert_equal(I_3x3, (Matrix)actualK));
  CHECK(assert_equal(expected,actual,1e-6));

  // Try using xyz call, asserting that Rot3::RzRyRx(x,y,z).xyz()==[x;y;z]

@@ -531,9 +527,9 @@ TEST( Rot3, RQ)
  // Try RQ to recover calibration from 3*3 sub-block of projection matrix
  Matrix K = (Matrix(3, 3) << 500.0, 0.0, 320.0, 0.0, 500.0, 240.0, 0.0, 0.0, 1.0).finished();
  Matrix A = K * R.matrix();
- std::tie(actualK, actual) = RQ(A);
- CHECK(assert_equal(K,actualK));
- CHECK(assert_equal(expected,actual,1e-6));
+ const auto [actualK2, actual2] = RQ(A);
+ CHECK(assert_equal(K, actualK2));
+ CHECK(assert_equal(expected, actual2, 1e-6));
  }

  /* ************************************************************************* */
@@ -48,10 +48,7 @@ Vector4 triangulateHomogeneousDLT(
  A.row(row) = p.x() * projection.row(2) - projection.row(0);
  A.row(row + 1) = p.y() * projection.row(2) - projection.row(1);
  }
- int rank;
- double error;
- Vector v;
- std::tie(rank, error, v) = DLT(A, rank_tol);
+ const auto [rank, error, v] = DLT(A, rank_tol);

  if (rank < 3) throw(TriangulationUnderconstrainedException());

@@ -79,10 +76,7 @@ Vector4 triangulateHomogeneousDLT(
  A.row(row) = p.x() * projection.row(2) - p.z() * projection.row(0);
  A.row(row + 1) = p.y() * projection.row(2) - p.z() * projection.row(1);
  }
- int rank;
- double error;
- Vector v;
- std::tie(rank, error, v) = DLT(A, rank_tol);
+ const auto [rank, error, v] = DLT(A, rank_tol);

  if (rank < 3) throw(TriangulationUnderconstrainedException());

@@ -193,9 +193,7 @@ Point3 triangulateNonlinear(const std::vector<Pose3>& poses,
  const SharedNoiseModel& model = nullptr) {

  // Create a factor graph and initial values
- Values values;
- NonlinearFactorGraph graph;
- std::tie(graph, values) = triangulationGraph<CALIBRATION> //
+ const auto [graph, values] = triangulationGraph<CALIBRATION> //
  (poses, sharedCal, measurements, Symbol('p', 0), initialEstimate, model);

  return optimize(graph, values, Symbol('p', 0));

@@ -215,9 +213,7 @@ Point3 triangulateNonlinear(
  const SharedNoiseModel& model = nullptr) {

  // Create a factor graph and initial values
- Values values;
- NonlinearFactorGraph graph;
- std::tie(graph, values) = triangulationGraph<CAMERA> //
+ const auto [graph, values] = triangulationGraph<CAMERA> //
  (cameras, measurements, Symbol('p', 0), initialEstimate, model);

  return optimize(graph, values, Symbol('p', 0));
@@ -251,9 +251,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
  #endif

  // Separate out decision tree into conditionals and remaining factors.
- GaussianMixture::Conditionals conditionals;
- GaussianMixtureFactor::Factors newFactors;
- std::tie(conditionals, newFactors) = unzip(eliminationResults);
+ const auto [conditionals, newFactors] = unzip(eliminationResults);

  // Create the GaussianMixture from the conditionals
  auto gaussianMixture = std::make_shared<GaussianMixture>(

@@ -100,9 +100,7 @@ struct HybridConstructorTraversalData {

  Ordering keyAsOrdering;
  keyAsOrdering.push_back(node->key);
- SymbolicConditional::shared_ptr conditional;
- SymbolicFactor::shared_ptr separatorFactor;
- std::tie(conditional, separatorFactor) =
+ const auto [conditional, separatorFactor] =
  internal::EliminateSymbolic(symbolicFactors, keyAsOrdering);

  // Store symbolic elimination results in the parent

@@ -160,9 +160,7 @@ TEST(HybridBayesNet, OptimizeAssignment) {

  const Ordering ordering(s.linearizationPoint.keys());

- HybridBayesNet::shared_ptr hybridBayesNet;
- HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
- std::tie(hybridBayesNet, remainingFactorGraph) =
+ const auto [hybridBayesNet, remainingFactorGraph] =
  s.linearizedFactorGraph.eliminatePartialSequential(ordering);

  DiscreteValues assignment;
@@ -325,10 +325,9 @@ TEST(HybridGaussianFactorGraph, Switching) {
  std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX),
  [](int x) { return X(x); });

- KeyVector ndX;
- std::vector<int> lvls;
- std::tie(ndX, lvls) = makeBinaryOrdering(ordX);
+ auto [ndX, lvls] = makeBinaryOrdering(ordX);
  std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering));
  // TODO(dellaert): this has no effect!
  for (auto &l : lvls) {
  l = -l;
  }

@@ -339,11 +338,9 @@ TEST(HybridGaussianFactorGraph, Switching) {
  std::vector<Key> ordC;
  std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC),
  [](int x) { return M(x); });
- KeyVector ndC;
- std::vector<int> lvls;

  // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering));
- std::tie(ndC, lvls) = makeBinaryOrdering(ordC);
+ const auto [ndC, lvls] = makeBinaryOrdering(ordC);
  std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering));
  }
  auto ordering_full = Ordering(ordering);

@@ -384,10 +381,9 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) {
  std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX),
  [](int x) { return X(x); });

- KeyVector ndX;
- std::vector<int> lvls;
- std::tie(ndX, lvls) = makeBinaryOrdering(ordX);
+ auto [ndX, lvls] = makeBinaryOrdering(ordX);
  std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering));
  // TODO(dellaert): this has no effect!
  for (auto &l : lvls) {
  l = -l;
  }

@@ -398,11 +394,9 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) {
  std::vector<Key> ordC;
  std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC),
  [](int x) { return M(x); });
- KeyVector ndC;
- std::vector<int> lvls;

  // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering));
- std::tie(ndC, lvls) = makeBinaryOrdering(ordC);
+ const auto [ndC, lvls] = makeBinaryOrdering(ordC);
  std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering));
  }
  auto ordering_full = Ordering(ordering);

@@ -476,9 +470,7 @@ TEST(HybridGaussianFactorGraph, SwitchingTwoVar) {
  ordering_partial.emplace_back(X(i));
  ordering_partial.emplace_back(Y(i));
  }
- HybridBayesNet::shared_ptr hbn;
- HybridGaussianFactorGraph::shared_ptr remaining;
- std::tie(hbn, remaining) = hfg->eliminatePartialSequential(ordering_partial);
+ const auto [hbn, remaining] = hfg->eliminatePartialSequential(ordering_partial);

  EXPECT_LONGS_EQUAL(14, hbn->size());
  EXPECT_LONGS_EQUAL(11, remaining->size());
@@ -350,10 +350,7 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
  ordering += X(0);
  ordering += X(1);

- HybridConditional::shared_ptr hybridConditionalMixture;
- std::shared_ptr<Factor> factorOnModes;

- std::tie(hybridConditionalMixture, factorOnModes) =
+ const auto [hybridConditionalMixture, factorOnModes] =
  EliminateHybrid(factors, ordering);

  auto gaussianConditionalMixture =

@@ -360,9 +360,10 @@ namespace gtsam {
  C1_minus_B.assign(C1_minus_B_set.begin(), C1_minus_B_set.end());
  }
  // Factor into C1\B | B.
- sharedFactorGraph temp_remaining;
- std::tie(p_C1_B, temp_remaining) =
- FactorGraphType(p_C1_Bred).eliminatePartialMultifrontal(Ordering(C1_minus_B), function);
+ p_C1_B =
+ FactorGraphType(p_C1_Bred)
+ .eliminatePartialMultifrontal(Ordering(C1_minus_B), function)
+ .first;
  }
  std::shared_ptr<typename EliminationTraitsType::BayesTreeType> p_C2_B; {
  KeyVector C2_minus_B; {

@@ -372,9 +373,10 @@ namespace gtsam {
  C2_minus_B.assign(C2_minus_B_set.begin(), C2_minus_B_set.end());
  }
  // Factor into C2\B | B.
- sharedFactorGraph temp_remaining;
- std::tie(p_C2_B, temp_remaining) =
- FactorGraphType(p_C2_Bred).eliminatePartialMultifrontal(Ordering(C2_minus_B), function);
+ p_C2_B =
+ FactorGraphType(p_C2_Bred)
+ .eliminatePartialMultifrontal(Ordering(C2_minus_B), function)
+ .first;
  }
  gttoc(Full_root_factoring);
@@ -83,10 +83,8 @@ struct ConstructorTraversalData {

  Ordering keyAsOrdering;
  keyAsOrdering.push_back(ETreeNode->key);
- SymbolicConditional::shared_ptr myConditional;
- SymbolicFactor::shared_ptr mySeparatorFactor;
- std::tie(myConditional, mySeparatorFactor) = internal::EliminateSymbolic(
- symbolicFactors, keyAsOrdering);
+ const auto [myConditional, mySeparatorFactor] =
+ internal::EliminateSymbolic(symbolicFactors, keyAsOrdering);

  // Store symbolic elimination results in the parent
  myData.parentData->childSymbolicConditionals.push_back(myConditional);

@@ -48,13 +48,8 @@ public:
  /* ************************************************************************* */
  template<class KEY>
  std::list<KEY> predecessorMap2Keys(const PredecessorMap<KEY>& p_map) {

  typedef typename SGraph<KEY>::Vertex SVertex;

- SGraph<KEY> g;
- SVertex root;
- std::map<KEY, SVertex> key2vertex;
- std::tie(g, root, key2vertex) = gtsam::predecessorMap2Graph<SGraph<KEY>, SVertex, KEY>(p_map);
+ const auto [g, root, key2vertex] = gtsam::predecessorMap2Graph<SGraph<KEY>, SVertex, KEY>(p_map);

  // breadth first visit on the graph
  std::list<KEY> keys;

@@ -121,9 +116,7 @@ predecessorMap2Graph(const PredecessorMap<KEY>& p_map) {
  std::map<KEY, V> key2vertex;
  V v1, v2, root;
  bool foundRoot = false;
- for(auto child_parent: p_map) {
- KEY child, parent;
- std::tie(child,parent) = child_parent;
+ for(const auto& [child, parent]: p_map) {
  if (key2vertex.find(child) == key2vertex.end()) {
  v1 = add_vertex(child, g);
  key2vertex.emplace(child, v1);

@@ -180,7 +173,6 @@ std::shared_ptr<Values> composePoses(const G& graph, const PredecessorMap<KEY>&
  boost::property<boost::vertex_name_t, KEY>,
  boost::property<boost::edge_weight_t, POSE> > PoseGraph;
  typedef typename boost::graph_traits<PoseGraph>::vertex_descriptor PoseVertex;
  typedef typename boost::graph_traits<PoseGraph>::edge_descriptor PoseEdge;

  PoseGraph g;
  PoseVertex root;

@@ -189,8 +181,6 @@ std::shared_ptr<Values> composePoses(const G& graph, const PredecessorMap<KEY>&
  predecessorMap2Graph<PoseGraph, PoseVertex, KEY>(tree);

  // attach the relative poses to the edges
- PoseEdge edge12, edge21;
- bool found1, found2;
  for(typename G::sharedFactor nl_factor: graph) {

  if (nl_factor->keys().size() > 2)

@@ -207,8 +197,8 @@ std::shared_ptr<Values> composePoses(const G& graph, const PredecessorMap<KEY>&
  PoseVertex v2 = key2vertex.find(key2)->second;

  POSE l1Xl2 = factor->measured();
- std::tie(edge12, found1) = boost::edge(v1, v2, g);
- std::tie(edge21, found2) = boost::edge(v2, v1, g);
+ const auto [edge12, found1] = boost::edge(v1, v2, g);
+ const auto [edge21, found2] = boost::edge(v2, v1, g);
  if (found1 && found2) throw std::invalid_argument ("composePoses: invalid spanning tree");
  if (!found1 && !found2) continue;
  if (found1)
@@ -416,9 +416,7 @@ void HessianFactor::multiplyHessianAdd(double alpha, const VectorValues& x,

  // copy to yvalues
  for (DenseIndex i = 0; i < (DenseIndex) size(); ++i) {
- bool didNotExist;
- VectorValues::iterator it;
- std::tie(it, didNotExist) = yvalues.tryInsert(keys_[i], Vector());
+ const auto [it, didNotExist] = yvalues.tryInsert(keys_[i], Vector());
  if (didNotExist)
  it->second = alpha * y[i]; // init
  else

@@ -96,9 +96,7 @@ JacobianFactor::JacobianFactor(const HessianFactor& factor)
  Ab_.full() = factor.info().selfadjointView();

  // Do Cholesky to get a Jacobian
- size_t maxrank;
- bool success;
- std::tie(maxrank, success) = choleskyCareful(Ab_.matrix());
+ const auto [maxrank, success] = choleskyCareful(Ab_.matrix());

  // Check that Cholesky succeeded OR it managed to factor the full Hessian.
  // THe latter case occurs with non-positive definite matrices arising from QP.

@@ -215,9 +213,7 @@ void JacobianFactor::JacobianFactorHelper(const GaussianFactorGraph& graph,
  graph);

  // Count dimensions
- FastVector<DenseIndex> varDims;
- DenseIndex m, n;
- std::tie(varDims, m, n) = _countDims(jacobians, orderedSlots);
+ const auto [varDims, m, n] = _countDims(jacobians, orderedSlots);

  // Allocate matrix and copy keys in order
  gttic(allocate);

@@ -34,8 +34,7 @@ namespace gtsam {
  SubgraphSolver::SubgraphSolver(const GaussianFactorGraph &Ab,
  const Parameters &parameters, const Ordering& ordering) :
  parameters_(parameters) {
- GaussianFactorGraph Ab1, Ab2;
- std::tie(Ab1, Ab2) = splitGraph(Ab);
+ const auto [Ab1, Ab2] = splitGraph(Ab);
  if (parameters_.verbosity())
  cout << "Split A into (A1) " << Ab1.size() << " and (A2) " << Ab2.size()
  << " factors" << endl;
@@ -50,8 +50,7 @@ static GaussianBayesNet noisyBayesNet = {
  /* ************************************************************************* */
  TEST( GaussianBayesNet, Matrix )
  {
- Matrix R; Vector d;
- std::tie(R,d) = smallBayesNet.matrix(); // find matrix and RHS
+ const auto [R, d] = smallBayesNet.matrix(); // find matrix and RHS

  Matrix R1 = (Matrix2() <<
  1.0, 1.0,

@@ -100,8 +99,7 @@ TEST(GaussianBayesNet, Evaluate2) {
  /* ************************************************************************* */
  TEST( GaussianBayesNet, NoisyMatrix )
  {
- Matrix R; Vector d;
- std::tie(R,d) = noisyBayesNet.matrix(); // find matrix and RHS
+ const auto [R, d] = noisyBayesNet.matrix(); // find matrix and RHS

  Matrix R1 = (Matrix2() <<
  0.5, 0.5,

@@ -123,9 +121,7 @@ TEST(GaussianBayesNet, Optimize) {

  /* ************************************************************************* */
  TEST(GaussianBayesNet, NoisyOptimize) {
- Matrix R;
- Vector d;
- std::tie(R, d) = noisyBayesNet.matrix(); // find matrix and RHS
+ const auto [R, d] = noisyBayesNet.matrix(); // find matrix and RHS
  const Vector x = R.inverse() * d;
  const VectorValues expected{{_x_, x.head(1)}, {_y_, x.tail(1)}};

@@ -236,9 +232,7 @@ TEST( GaussianBayesNet, MatrixStress )
  KeyVector({_y_, _x_, _z_}), KeyVector({_y_, _z_, _x_}),
  KeyVector({_z_, _x_, _y_}), KeyVector({_z_, _y_, _x_})}) {
  const Ordering ordering(keys);
- Matrix R;
- Vector d;
- std::tie(R, d) = bn.matrix(ordering);
+ const auto [R, d] = bn.matrix(ordering);
  EXPECT(assert_equal(expected.vector(ordering), R.inverse() * d));
  }
  }
@@ -155,20 +155,16 @@ TEST(GaussianFactorGraph, matrices) {
  // jacobian
  Matrix A = Ab.leftCols(Ab.cols() - 1);
  Vector b = Ab.col(Ab.cols() - 1);
- Matrix actualA;
- Vector actualb;
- std::tie(actualA, actualb) = gfg.jacobian();
+ const auto [actualA, actualb] = gfg.jacobian();
  EXPECT(assert_equal(A, actualA));
  EXPECT(assert_equal(b, actualb));

  // hessian
  Matrix L = A.transpose() * A;
  Vector eta = A.transpose() * b;
- Matrix actualL;
- Vector actualeta;
- std::tie(actualL, actualeta) = gfg.hessian();
+ const auto [actualL, actualEta] = gfg.hessian();
  EXPECT(assert_equal(L, actualL));
- EXPECT(assert_equal(eta, actualeta));
+ EXPECT(assert_equal(eta, actualEta));

  // hessianBlockDiagonal
  VectorValues expectLdiagonal; // Make explicit that diagonal is sum-squares of columns

@@ -261,12 +257,8 @@ TEST(GaussianFactorGraph, eliminate_empty) {
  /* ************************************************************************* */
  TEST(GaussianFactorGraph, matrices2) {
  GaussianFactorGraph gfg = createSimpleGaussianFactorGraph();
- Matrix A;
- Vector b;
- std::tie(A, b) = gfg.jacobian();
- Matrix AtA;
- Vector eta;
- std::tie(AtA, eta) = gfg.hessian();
+ const auto [A, b] = gfg.jacobian();
+ const auto [AtA, eta] = gfg.hessian();
  EXPECT(assert_equal(A.transpose() * A, AtA));
  EXPECT(assert_equal(A.transpose() * b, eta));
  Matrix expectedAtA(6, 6);

@@ -312,9 +304,7 @@ TEST(GaussianFactorGraph, multiplyHessianAdd2) {
  GaussianFactorGraph gfg = createGaussianFactorGraphWithHessianFactor();

  // brute force
- Matrix AtA;
- Vector eta;
- std::tie(AtA, eta) = gfg.hessian();
+ const auto [AtA, eta] = gfg.hessian();
  Vector X(6);
  X << 1, 2, 3, 4, 5, 6;
  Vector Y(6);

@@ -339,12 +329,8 @@ TEST(GaussianFactorGraph, multiplyHessianAdd2) {
  /* ************************************************************************* */
  TEST(GaussianFactorGraph, matricesMixed) {
  GaussianFactorGraph gfg = createGaussianFactorGraphWithHessianFactor();
- Matrix A;
- Vector b;
- std::tie(A, b) = gfg.jacobian(); // incorrect !
- Matrix AtA;
- Vector eta;
- std::tie(AtA, eta) = gfg.hessian(); // correct
+ const auto [A, b] = gfg.jacobian(); // incorrect !
+ const auto [AtA, eta] = gfg.hessian(); // correct
  EXPECT(assert_equal(A.transpose() * A, AtA));
  Vector expected = -(Vector(6) << -25, 17.5, 5, -13.5, 29, 4).finished();
  EXPECT(assert_equal(expected, eta));
@@ -293,15 +293,11 @@ TEST(HessianFactor, CombineAndEliminate1) {

  // perform elimination on jacobian
  Ordering ordering {1};
- GaussianConditional::shared_ptr expectedConditional;
- JacobianFactor::shared_ptr expectedFactor;
- std::tie(expectedConditional, expectedFactor) = jacobian.eliminate(ordering);
+ const auto [expectedConditional, expectedFactor] = jacobian.eliminate(ordering);
  CHECK(expectedFactor);

  // Eliminate
- GaussianConditional::shared_ptr actualConditional;
- HessianFactor::shared_ptr actualHessian;
- std::tie(actualConditional, actualHessian) = //
+ const auto [actualConditional, actualHessian] = //
  EliminateCholesky(gfg, ordering);
  actualConditional->setModel(false,Vector3(1,1,1)); // add a unit model for comparison

@@ -356,15 +352,11 @@ TEST(HessianFactor, CombineAndEliminate2) {

  // perform elimination on jacobian
  Ordering ordering {0};
- GaussianConditional::shared_ptr expectedConditional;
- JacobianFactor::shared_ptr expectedFactor;
- std::tie(expectedConditional, expectedFactor) = //
+ const auto [expectedConditional, expectedFactor] =
  jacobian.eliminate(ordering);

  // Eliminate
- GaussianConditional::shared_ptr actualConditional;
- HessianFactor::shared_ptr actualHessian;
- std::tie(actualConditional, actualHessian) = //
+ const auto [actualConditional, actualHessian] = //
  EliminateCholesky(gfg, ordering);
  actualConditional->setModel(false,Vector3(1,1,1)); // add a unit model for comparison

@@ -498,7 +490,7 @@ TEST(HessianFactor, gradientAtZero)

  // test gradient at zero
  VectorValues expectedG{{0, -g1}, {1, -g2}};
- Matrix A; Vector b; std::tie(A,b) = factor.jacobian();
+ const auto [A, b] = factor.jacobian();
  KeyVector keys {0, 1};
  EXPECT(assert_equal(-A.transpose()*b, expectedG.vector(keys)));
  VectorValues actualG = factor.gradientAtZero();

@@ -368,7 +368,7 @@ TEST(JacobianFactor, operators )
  EXPECT(assert_equal(expectedX, actualX));

  // test gradient at zero
- Matrix A; Vector b2; std::tie(A,b2) = lf.jacobian();
+ const auto [A, b2] = lf.jacobian();
  VectorValues expectedG;
  expectedG.insert(1, Vector2(20,-10));
  expectedG.insert(2, Vector2(-20, 10));
@@ -123,9 +123,7 @@ TEST(GPSData, init) {
  Point3 NED2(N, E, -U);

  // Estimate initial state
- Pose3 T;
- Vector3 nV;
- std::tie(T, nV) = GPSFactor::EstimateState(t1, NED1, t2, NED2, 84831.0796);
+ const auto [T, nV] = GPSFactor::EstimateState(t1, NED1, t2, NED2, 84831.0796);

  // Check values values
  EXPECT(assert_equal((Vector )Vector3(29.9575, -29.0564, -1.95993), nV, 1e-4));

@@ -143,9 +143,7 @@ T Expression<T>::value(const Values& values,
  std::vector<Matrix>* H) const {
  if (H) {
  // Call private version that returns derivatives in H
- KeyVector keys;
- FastVector<int> dims;
- std::tie(keys, dims) = keysAndDims();
+ const auto [keys, dims] = keysAndDims();
  return valueAndDerivatives(values, keys, dims, *H);
  } else
  // no derivatives needed, just return value

@@ -330,9 +330,7 @@ void ISAM2Clique::addGradientAtZero(VectorValues* g) const {
  for (auto it = conditional_->begin(); it != conditional_->end(); ++it) {
  const DenseIndex dim = conditional_->getDim(it);
  const Vector contribution = gradientContribution_.segment(position, dim);
- VectorValues::iterator values_it;
- bool success;
- std::tie(values_it, success) = g->tryInsert(*it, contribution);
+ auto [values_it, success] = g->tryInsert(*it, contribution);
  if (!success) values_it->second += contribution;
  position += dim;
  }
@@ -63,9 +63,7 @@ NonlinearConjugateGradientOptimizer::System::State NonlinearConjugateGradientOpt
  }

  GaussianFactorGraph::shared_ptr NonlinearConjugateGradientOptimizer::iterate() {
- Values newValues;
- int dummy;
- std::tie(newValues, dummy) = nonlinearConjugateGradient<System, Values>(
+ const auto [newValues, dummy] = nonlinearConjugateGradient<System, Values>(
  System(graph_), state_->values, params_, true /* single iteration */);
  state_.reset(new State(newValues, graph_.error(newValues), state_->iterations + 1));

@@ -76,9 +74,7 @@ GaussianFactorGraph::shared_ptr NonlinearConjugateGradientOptimizer::iterate() {
  const Values& NonlinearConjugateGradientOptimizer::optimize() {
  // Optimize until convergence
  System system(graph_);
- Values newValues;
- int iterations;
- std::tie(newValues, iterations) =
+ const auto [newValues, iterations] =
  nonlinearConjugateGradient(system, state_->values, params_, false);
  state_.reset(new State(std::move(newValues), graph_.error(newValues), iterations));
  return state_->values;

@@ -159,7 +159,7 @@ std::tuple<V, int> nonlinearConjugateGradient(const S &system,
  std::cout << "Exiting, as error = " << currentError << " < "
  << params.errorTol << std::endl;
  }
- return std::tie(initial, iteration);
+ return {initial, iteration};
  }

  V currentValues = initial;

@@ -217,7 +217,7 @@ std::tuple<V, int> nonlinearConjugateGradient(const S &system,
  << "nonlinearConjugateGradient: Terminating because reached maximum iterations"
  << std::endl;

- return std::tie(currentValues, iteration);
+ return {currentValues, iteration};
  }

  } // \ namespace gtsam
@@ -179,10 +179,8 @@ ShonanAveraging<d>::createOptimizerAt(size_t p, const Values &initial) const {

  // Anchor prior is added here as depends on initial value (and cost is zero)
  if (parameters_.alpha > 0) {
- size_t i;
- Rot value;
  const size_t dim = SOn::Dimension(p);
- std::tie(i, value) = parameters_.anchor;
+ const auto [i, value] = parameters_.anchor;
  auto model = noiseModel::Isotropic::Precision(dim, parameters_.alpha);
  graph.emplace_shared<PriorFactor<SOn>>(i, SOn::Lift(p, value.matrix()),
  model);

@@ -44,11 +44,9 @@ public:
  //gfg.print("gfg");

  // eliminate the point
- std::shared_ptr<GaussianBayesNet> bn;
- GaussianFactorGraph::shared_ptr fg;
  KeyVector variables;
  variables.push_back(pointKey);
- std::tie(bn, fg) = gfg.eliminatePartialSequential(variables, EliminateQR);
+ const auto [bn, fg] = gfg.eliminatePartialSequential(variables, EliminateQR);
  //fg->print("fg");

  JacobianFactor::operator=(JacobianFactor(*fg));
@@ -92,9 +92,7 @@ TEST( dataSet, parseEdge)
  TEST(dataSet, load2D) {
  ///< The structure where we will save the SfM data
  const string filename = findExampleDataFile("w100.graph");
- NonlinearFactorGraph::shared_ptr graph;
- Values::shared_ptr initial;
- std::tie(graph, initial) = load2D(filename);
+ const auto [graph, initial] = load2D(filename);
  EXPECT_LONGS_EQUAL(300, graph->size());
  EXPECT_LONGS_EQUAL(100, initial->size());
  auto model = noiseModel::Unit::Create(3);

@@ -135,20 +133,17 @@ TEST(dataSet, load2D) {
  /* ************************************************************************* */
  TEST(dataSet, load2DVictoriaPark) {
  const string filename = findExampleDataFile("victoria_park.txt");
- NonlinearFactorGraph::shared_ptr graph;
- Values::shared_ptr initial;

  // Load all
- std::tie(graph, initial) = load2D(filename);
- EXPECT_LONGS_EQUAL(10608, graph->size());
- EXPECT_LONGS_EQUAL(7120, initial->size());
+ const auto [graph1, initial1] = load2D(filename);
+ EXPECT_LONGS_EQUAL(10608, graph1->size());
+ EXPECT_LONGS_EQUAL(7120, initial1->size());

  // Restrict keys
  size_t maxIndex = 5;
- std::tie(graph, initial) = load2D(filename, nullptr, maxIndex);
- EXPECT_LONGS_EQUAL(5, graph->size());
- EXPECT_LONGS_EQUAL(6, initial->size()); // file has 0 as well
- EXPECT_LONGS_EQUAL(L(5), graph->at(4)->keys()[1]);
+ const auto [graph2, initial2] = load2D(filename, nullptr, maxIndex);
+ EXPECT_LONGS_EQUAL(5, graph2->size());
+ EXPECT_LONGS_EQUAL(6, initial2->size()); // file has 0 as well
+ EXPECT_LONGS_EQUAL(L(5), graph2->at(4)->keys()[1]);
  }

  /* ************************************************************************* */
@@ -218,10 +213,8 @@ TEST(dataSet, readG2o3D) {
  }

  // Check graph version
- NonlinearFactorGraph::shared_ptr actualGraph;
- Values::shared_ptr actualValues;
  bool is3D = true;
- std::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D);
+ const auto [actualGraph, actualValues] = readG2o(g2oFile, is3D);
  EXPECT(assert_equal(expectedGraph, *actualGraph, 1e-5));
  for (size_t j : {0, 1, 2, 3, 4}) {
  EXPECT(assert_equal(poses[j], actualValues->at<Pose3>(j), 1e-5));

@@ -232,10 +225,8 @@ TEST(dataSet, readG2o3D) {
  TEST( dataSet, readG2o3DNonDiagonalNoise)
  {
  const string g2oFile = findExampleDataFile("pose3example-offdiagonal.txt");
- NonlinearFactorGraph::shared_ptr actualGraph;
- Values::shared_ptr actualValues;
  bool is3D = true;
- std::tie(actualGraph, actualValues) = readG2o(g2oFile, is3D);
+ const auto [actualGraph, actualValues] = readG2o(g2oFile, is3D);

  Values expectedValues;
  Rot3 R0 = Rot3::Quaternion(1.000000, 0.000000, 0.000000, 0.000000 );

@@ -327,9 +318,7 @@ static NonlinearFactorGraph expectedGraph(const SharedNoiseModel& model) {
  /* ************************************************************************* */
  TEST(dataSet, readG2o) {
  const string g2oFile = findExampleDataFile("pose2example");
- NonlinearFactorGraph::shared_ptr actualGraph;
- Values::shared_ptr actualValues;
- std::tie(actualGraph, actualValues) = readG2o(g2oFile);
+ const auto [actualGraph, actualValues] = readG2o(g2oFile);

  auto model = noiseModel::Diagonal::Precisions(
  Vector3(44.721360, 44.721360, 30.901699));

@@ -353,10 +342,8 @@ TEST(dataSet, readG2o) {
  /* ************************************************************************* */
  TEST(dataSet, readG2oHuber) {
  const string g2oFile = findExampleDataFile("pose2example");
- NonlinearFactorGraph::shared_ptr actualGraph;
- Values::shared_ptr actualValues;
  bool is3D = false;
- std::tie(actualGraph, actualValues) =
+ const auto [actualGraph, actualValues] =
  readG2o(g2oFile, is3D, KernelFunctionTypeHUBER);

  auto baseModel = noiseModel::Diagonal::Precisions(

@@ -370,10 +357,8 @@ TEST(dataSet, readG2oHuber) {
  /* ************************************************************************* */
  TEST(dataSet, readG2oTukey) {
  const string g2oFile = findExampleDataFile("pose2example");
- NonlinearFactorGraph::shared_ptr actualGraph;
- Values::shared_ptr actualValues;
  bool is3D = false;
- std::tie(actualGraph, actualValues) =
+ const auto [actualGraph, actualValues] =
  readG2o(g2oFile, is3D, KernelFunctionTypeTUKEY);

  auto baseModel = noiseModel::Diagonal::Precisions(
@@ -388,16 +373,12 @@ TEST(dataSet, readG2oTukey) {
  TEST( dataSet, writeG2o)
  {
  const string g2oFile = findExampleDataFile("pose2example");
- NonlinearFactorGraph::shared_ptr expectedGraph;
- Values::shared_ptr expectedValues;
- std::tie(expectedGraph, expectedValues) = readG2o(g2oFile);
+ const auto [expectedGraph, expectedValues] = readG2o(g2oFile);

  const string filenameToWrite = createRewrittenFileName(g2oFile);
  writeG2o(*expectedGraph, *expectedValues, filenameToWrite);

- NonlinearFactorGraph::shared_ptr actualGraph;
- Values::shared_ptr actualValues;
- std::tie(actualGraph, actualValues) = readG2o(filenameToWrite);
+ const auto [actualGraph, actualValues] = readG2o(filenameToWrite);
  EXPECT(assert_equal(*expectedValues,*actualValues,1e-5));
  EXPECT(assert_equal(*expectedGraph,*actualGraph,1e-5));
  }

@@ -406,17 +387,13 @@ TEST( dataSet, writeG2o)
  TEST( dataSet, writeG2o3D)
  {
  const string g2oFile = findExampleDataFile("pose3example");
- NonlinearFactorGraph::shared_ptr expectedGraph;
- Values::shared_ptr expectedValues;
  bool is3D = true;
- std::tie(expectedGraph, expectedValues) = readG2o(g2oFile, is3D);
+ const auto [expectedGraph, expectedValues] = readG2o(g2oFile, is3D);

  const string filenameToWrite = createRewrittenFileName(g2oFile);
  writeG2o(*expectedGraph, *expectedValues, filenameToWrite);

- NonlinearFactorGraph::shared_ptr actualGraph;
- Values::shared_ptr actualValues;
- std::tie(actualGraph, actualValues) = readG2o(filenameToWrite, is3D);
+ const auto [actualGraph, actualValues] = readG2o(filenameToWrite, is3D);
  EXPECT(assert_equal(*expectedValues,*actualValues,1e-4));
  EXPECT(assert_equal(*expectedGraph,*actualGraph,1e-4));
  }

@@ -425,17 +402,13 @@ TEST( dataSet, writeG2o3D)
  TEST( dataSet, writeG2o3DNonDiagonalNoise)
  {
  const string g2oFile = findExampleDataFile("pose3example-offdiagonal");
- NonlinearFactorGraph::shared_ptr expectedGraph;
- Values::shared_ptr expectedValues;
  bool is3D = true;
- std::tie(expectedGraph, expectedValues) = readG2o(g2oFile, is3D);
+ const auto [expectedGraph, expectedValues] = readG2o(g2oFile, is3D);

  const string filenameToWrite = createRewrittenFileName(g2oFile);
  writeG2o(*expectedGraph, *expectedValues, filenameToWrite);

- NonlinearFactorGraph::shared_ptr actualGraph;
- Values::shared_ptr actualValues;
- std::tie(actualGraph, actualValues) = readG2o(filenameToWrite, is3D);
+ const auto [actualGraph, actualValues] = readG2o(filenameToWrite, is3D);
  EXPECT(assert_equal(*expectedValues,*actualValues,1e-4));
  EXPECT(assert_equal(*expectedGraph,*actualGraph,1e-4));
  }
@@ -33,10 +33,8 @@ using namespace gtsam;
  /* ************************************************************************* */
  TEST(InitializePose3, computePoses2D) {
  const string g2oFile = findExampleDataFile("noisyToyGraph.txt");
- NonlinearFactorGraph::shared_ptr inputGraph;
- Values::shared_ptr posesInFile;
  bool is3D = false;
- std::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D);
+ const auto [inputGraph, posesInFile] = readG2o(g2oFile, is3D);

  auto priorModel = noiseModel::Unit::Create(3);
  inputGraph->addPrior(0, posesInFile->at<Pose2>(0), priorModel);

@@ -56,10 +54,8 @@ TEST(InitializePose3, computePoses2D) {
  /* ************************************************************************* */
  TEST(InitializePose3, computePoses3D) {
  const string g2oFile = findExampleDataFile("Klaus3");
- NonlinearFactorGraph::shared_ptr inputGraph;
- Values::shared_ptr posesInFile;
  bool is3D = true;
- std::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D);
+ const auto [inputGraph, posesInFile] = readG2o(g2oFile, is3D);

  auto priorModel = noiseModel::Unit::Create(6);
  inputGraph->addPrior(0, posesInFile->at<Pose3>(0), priorModel);

@@ -231,10 +231,8 @@ TEST( InitializePose3, orientationsGradient ) {
  // writeG2o(pose3Graph, givenPoses, g2oFile);

  const string matlabResultsfile = findExampleDataFile("simpleGraph10gradIter");
- NonlinearFactorGraph::shared_ptr matlabGraph;
- Values::shared_ptr matlabValues;
  bool is3D = true;
- std::tie(matlabGraph, matlabValues) = readG2o(matlabResultsfile, is3D);
+ const auto [matlabGraph, matlabValues] = readG2o(matlabResultsfile, is3D);

  Rot3 R0Expected = matlabValues->at<Pose3>(1).rotation();
  EXPECT(assert_equal(R0Expected, orientations.at<Rot3>(x0), 1e-4));

@@ -266,10 +264,8 @@ TEST( InitializePose3, posesWithGivenGuess ) {
  /* ************************************************************************* */
  TEST(InitializePose3, initializePoses) {
  const string g2oFile = findExampleDataFile("pose3example-grid");
- NonlinearFactorGraph::shared_ptr inputGraph;
- Values::shared_ptr posesInFile;
  bool is3D = true;
- std::tie(inputGraph, posesInFile) = readG2o(g2oFile, is3D);
+ const auto [inputGraph, posesInFile] = readG2o(g2oFile, is3D);

  auto priorModel = noiseModel::Unit::Create(6);
  inputGraph->addPrior(0, Pose3(), priorModel);
@@ -258,9 +258,7 @@ TEST( Lago, smallGraph2 ) {
  TEST( Lago, largeGraphNoisy_orientations ) {

  string inputFile = findExampleDataFile("noisyToyGraph");
- NonlinearFactorGraph::shared_ptr g;
- Values::shared_ptr initial;
- std::tie(g, initial) = readG2o(inputFile);
+ const auto [g, initial] = readG2o(inputFile);

  // Add prior on the pose having index (key) = 0
  NonlinearFactorGraph graphWithPrior = *g;

@@ -279,9 +277,7 @@ TEST( Lago, largeGraphNoisy_orientations ) {
  }
  }
  string matlabFile = findExampleDataFile("orientationsNoisyToyGraph");
- NonlinearFactorGraph::shared_ptr gmatlab;
- Values::shared_ptr expected;
- std::tie(gmatlab, expected) = readG2o(matlabFile);
+ const auto [gmatlab, expected] = readG2o(matlabFile);

  for(const auto& key_pose: expected->extract<Pose2>()){
  const Key& k = key_pose.first;

@@ -294,9 +290,7 @@ TEST( Lago, largeGraphNoisy_orientations ) {
  TEST( Lago, largeGraphNoisy ) {

  string inputFile = findExampleDataFile("noisyToyGraph");
- NonlinearFactorGraph::shared_ptr g;
- Values::shared_ptr initial;
- std::tie(g, initial) = readG2o(inputFile);
+ const auto [g, initial] = readG2o(inputFile);

  // Add prior on the pose having index (key) = 0
  NonlinearFactorGraph graphWithPrior = *g;

@@ -306,9 +300,7 @@ TEST( Lago, largeGraphNoisy ) {
  Values actual = lago::initialize(graphWithPrior);

  string matlabFile = findExampleDataFile("optimizedNoisyToyGraph");
- NonlinearFactorGraph::shared_ptr gmatlab;
- Values::shared_ptr expected;
- std::tie(gmatlab, expected) = readG2o(matlabFile);
+ const auto [gmatlab, expected] = readG2o(matlabFile);

  for(const auto& key_pose: expected->extract<Pose2>()){
  const Key& k = key_pose.first;
@@ -74,9 +74,7 @@ TEST(SymbolicFactor, EliminateSymbolic)
  const SymbolicConditional expectedConditional =
  SymbolicConditional::FromKeys(KeyVector{0,1,2,3,4,5,6}, 4);

- SymbolicFactor::shared_ptr actualFactor;
- SymbolicConditional::shared_ptr actualConditional;
- std::tie(actualConditional, actualFactor) =
+ const auto [actualConditional, actualFactor] =
  EliminateSymbolic(factors, Ordering{0, 1, 2, 3});

  CHECK(assert_equal(expectedConditional, *actualConditional));

@@ -79,8 +79,6 @@ class LoopyBelief {
  DiscreteFactorGraph::shared_ptr iterate(
  const std::map<Key, DiscreteKey>& allDiscreteKeys) {
  static const bool debug = false;
- static DiscreteConditional::shared_ptr
- dummyCond; // unused by-product of elimination
  DiscreteFactorGraph::shared_ptr beliefs(new DiscreteFactorGraph());
  std::map<Key, std::map<Key, DiscreteFactor::shared_ptr> > allMessages;
  // Eliminate each star graph

@@ -99,8 +97,7 @@ class LoopyBelief {
  subGraph.push_back(starGraphs_.at(key).star->at(factor));
  }
  if (debug) subGraph.print("------- Subgraph:");
- DiscreteFactor::shared_ptr message;
- std::tie(dummyCond, message) =
+ const auto [dummyCond, message] =
  EliminateDiscrete(subGraph, Ordering{neighbor});
  // store the new factor into messages
  messages.insert(make_pair(neighbor, message));
@@ -130,9 +130,7 @@ TEST(InvDepthFactor, backproject)
  InvDepthCamera3<Cal3_S2> inv_camera(level_pose,K);
  Point2 z = inv_camera.project(expected, inv_depth);

- Vector5 actual_vec;
- double actual_inv;
- std::tie(actual_vec, actual_inv) = inv_camera.backproject(z, 4);
+ const auto [actual_vec, actual_inv] = inv_camera.backproject(z, 4);
  EXPECT(assert_equal(expected,actual_vec,1e-7));
  EXPECT_DOUBLES_EQUAL(inv_depth,actual_inv,1e-7);
  }

@@ -146,9 +144,7 @@ TEST(InvDepthFactor, backproject2)
  InvDepthCamera3<Cal3_S2> inv_camera(Pose3(Rot3::Ypr(1.5,0.1, -1.5), Point3(-5, -5, 2)),K);
  Point2 z = inv_camera.project(expected, inv_depth);

- Vector5 actual_vec;
- double actual_inv;
- std::tie(actual_vec, actual_inv) = inv_camera.backproject(z, 10);
+ const auto [actual_vec, actual_inv] = inv_camera.backproject(z, 10);
  EXPECT(assert_equal(expected,actual_vec,1e-7));
  EXPECT_DOUBLES_EQUAL(inv_depth,actual_inv,1e-7);
  }
@@ -219,10 +219,8 @@ Template typename This::State This::iterate(
  } else {
  // If we CAN make some progress, i.e. p_k != 0
  // Adapt stepsize if some inactive constraints complain about this move
- double alpha;
- int factorIx;
  VectorValues p = newValues - state.values;
- std::tie(alpha, factorIx) = // using 16.41
+ const auto [alpha, factorIx] = // using 16.41
  computeStepSize(state.workingSet, state.values, p, POLICY::maxAlpha);
  // also add to the working set the one that complains the most
  InequalityFactorGraph newWorkingSet = state.workingSet;

@@ -69,8 +69,7 @@ TEST(LPInitSolver, InfiniteLoopSingleVar) {
  LPSolver solver(lp);
  VectorValues starter;
  starter.insert(1, Vector3(0, 0, 2));
- VectorValues results, duals;
- std::tie(results, duals) = solver.optimize(starter);
+ const auto [results, duals] = solver.optimize(starter);
  VectorValues expected;
  expected.insert(1, Vector3(13.5, 6.5, -6.5));
  CHECK(assert_equal(results, expected, 1e-7));

@@ -97,8 +96,7 @@ TEST(LPInitSolver, InfiniteLoopMultiVar) {
  starter.insert(X, kZero);
  starter.insert(Y, kZero);
  starter.insert(Z, Vector::Constant(1, 2.0));
- VectorValues results, duals;
- std::tie(results, duals) = solver.optimize(starter);
+ const auto [results, duals] = solver.optimize(starter);
  VectorValues expected;
  expected.insert(X, Vector::Constant(1, 13.5));
  expected.insert(Y, Vector::Constant(1, 6.5));

@@ -197,8 +195,7 @@ TEST(LPSolver, SimpleTest1) {
  expected_x1.insert(1, Vector::Ones(2));
  CHECK(assert_equal(expected_x1, x1, 1e-10));

- VectorValues result, duals;
- std::tie(result, duals) = lpSolver.optimize(init);
+ const auto [result, duals] = lpSolver.optimize(init);
  VectorValues expectedResult;
  expectedResult.insert(1, Vector2(8. / 3., 2. / 3.));
  CHECK(assert_equal(expectedResult, result, 1e-10));
@@ -208,9 +205,9 @@ TEST(LPSolver, SimpleTest1) {
  TEST(LPSolver, TestWithoutInitialValues) {
  LP lp = simpleLP1();
  LPSolver lpSolver(lp);
- VectorValues result, duals, expectedResult;
+ VectorValues expectedResult;
  expectedResult.insert(1, Vector2(8. / 3., 2. / 3.));
- std::tie(result, duals) = lpSolver.optimize();
+ const auto [result, duals] = lpSolver.optimize();
  CHECK(assert_equal(expectedResult, result));
  }

@@ -200,9 +200,7 @@ TEST(LinearEquality, operators) {
  EXPECT(assert_equal(expectedX, actualX));

  // test gradient at zero
- Matrix A;
- Vector b2;
- std::tie(A, b2) = lf.jacobian();
+ const auto [A, b2] = lf.jacobian();
  VectorValues expectedG;
  expectedG.insert(1, (Vector(2) << 0.2, -0.1).finished());
  expectedG.insert(2, (Vector(2) << -0.2, 0.1).finished());

@@ -249,9 +249,7 @@ namespace gtsam { namespace partition {
  prepareMetisGraph<GenericGraph>(graph, keys, workspace, &xadj, &adjncy, &adjwgt);

  // run ND on the graph
- size_t sepsize;
- sharedInts part;
- std::tie(sepsize, part) = separatorMetis(numKeys, xadj, adjncy, adjwgt, verbose);
+ const auto [sepsize, part] = separatorMetis(numKeys, xadj, adjncy, adjwgt, verbose);
  if (!sepsize) return std::optional<MetisResult>();

  // convert the 0-1-2 from Metis to 1-2-0, so that the separator is 0, as later

@@ -309,9 +307,7 @@ namespace gtsam { namespace partition {
  prepareMetisGraph<GenericGraph>(graph, keys, workspace, &xadj, &adjncy, &adjwgt);

  // run metis on the graph
- int edgecut;
- sharedInts part;
- std::tie(edgecut, part) = edgeMetis(numKeys, xadj, adjncy, adjwgt, verbose);
+ const auto [edgecut, part] = edgeMetis(numKeys, xadj, adjncy, adjwgt, verbose);

  // convert the 0-1-2 from Metis to 1-2-0, so that the separator is 0, as later we will have more submaps
  MetisResult result;
@@ -20,9 +20,7 @@ namespace gtsam { namespace partition {
  NestedDissection<NLG, SubNLG, GenericGraph>::NestedDissection(
  const NLG& fg, const Ordering& ordering, const int numNodeStopPartition, const int minNodesPerMap, const bool verbose) :
  fg_(fg), ordering_(ordering){
- GenericUnaryGraph unaryFactors;
- GenericGraph gfg;
- std::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering);
+ const auto [unaryFactors, gfg] = fg.createGenericGraph(ordering);

  // build reverse mapping from integer to symbol
  int numNodes = ordering.size();

@@ -44,9 +42,7 @@ namespace gtsam { namespace partition {
  template <class NLG, class SubNLG, class GenericGraph>
  NestedDissection<NLG, SubNLG, GenericGraph>::NestedDissection(
  const NLG& fg, const Ordering& ordering, const std::shared_ptr<Cuts>& cuts, const bool verbose) : fg_(fg), ordering_(ordering){
- GenericUnaryGraph unaryFactors;
- GenericGraph gfg;
- std::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering);
+ const auto [unaryFactors, gfg] = fg.createGenericGraph(ordering);

  // build reverse mapping from integer to symbol
  int numNodes = ordering.size();

@@ -72,9 +72,7 @@ TEST (AHRS, constructor) {
  // TODO make a testMechanization_bRn2
  TEST(AHRS, Mechanization_integrate) {
  AHRS ahrs = AHRS(stationaryU, stationaryF, g_e);
- Mechanization_bRn2 mech;
- KalmanFilter::State state;
- // std::tie(mech,state) = ahrs.initialize(g_e);
+ // const auto [mech, state] = ahrs.initialize(g_e);
  // Vector u = Vector3(0.05, 0.0, 0.0);
  // double dt = 2;
  // Rot3 expected;
@ -118,10 +118,8 @@ TEST( InvDepthFactor, Jacobian3D ) {
|
|||
Point2 expected_uv = level_camera.project(landmark);
|
||||
|
||||
// get expected landmark representation using backprojection
|
||||
double inv_depth;
|
||||
Vector5 inv_landmark;
|
||||
InvDepthCamera3<Cal3_S2> inv_camera(level_pose, K);
|
||||
std::tie(inv_landmark, inv_depth) = inv_camera.backproject(expected_uv, 5);
|
||||
const auto [inv_landmark, inv_depth] = inv_camera.backproject(expected_uv, 5);
|
||||
Vector5 expected_inv_landmark((Vector(5) << 0., 0., 1., 0., 0.).finished());
|
||||
|
||||
CHECK(assert_equal(expected_inv_landmark, inv_landmark, 1e-6));
|
||||
|
|
|
@ -463,10 +463,7 @@ inline std::pair<NonlinearFactorGraph, Values> createNonlinearSmoother(int T) {
/* ************************************************************************* */
inline GaussianFactorGraph createSmoother(int T) {
NonlinearFactorGraph nlfg;
Values poses;
std::tie(nlfg, poses) = createNonlinearSmoother(T);
const auto [nlfg, poses] = createNonlinearSmoother(T);
return *nlfg.linearize(poses);
}
@ -233,9 +233,7 @@ TEST(ExpressionFactor, Shallow) {
Point2_ expression = project(transformTo(x_, p_));
// Get and check keys and dims
KeyVector keys;
FastVector<int> dims;
std::tie(keys, dims) = expression.keysAndDims();
const auto [keys, dims] = expression.keysAndDims();
LONGS_EQUAL(2,keys.size());
LONGS_EQUAL(2,dims.size());
LONGS_EQUAL(1,keys[0]);
@ -111,9 +111,7 @@ TEST(GaussianFactorGraph, eliminateOne_l1) {
/* ************************************************************************* */
TEST(GaussianFactorGraph, eliminateOne_x1_fast) {
GaussianFactorGraph fg = createGaussianFactorGraph();
GaussianConditional::shared_ptr conditional;
JacobianFactor::shared_ptr remaining;
std::tie(conditional, remaining) = EliminateQR(fg, Ordering{X(1)});
const auto [conditional, remaining] = EliminateQR(fg, Ordering{X(1)});
// create expected Conditional Gaussian
Matrix I = 15 * I_2x2, R11 = I, S12 = -0.111111 * I, S13 = -0.444444 * I;

@ -289,9 +287,7 @@ TEST(GaussianFactorGraph, elimination) {
GaussianBayesNet bayesNet = *fg.eliminateSequential();
// Check matrix
Matrix R;
Vector d;
std::tie(R, d) = bayesNet.matrix();
const auto [R, d] = bayesNet.matrix();
Matrix expected =
(Matrix(2, 2) << 0.707107, -0.353553, 0.0, 0.612372).finished();
Matrix expected2 =
@ -246,7 +246,6 @@ bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, c
// Check gradient at each node
bool nodeGradientsOk = true;
typedef ISAM2::sharedClique sharedClique;
for (const auto& [key, clique] : isam.nodes()) {
// Compute expected gradient
GaussianFactorGraph jfg;

@ -450,7 +449,6 @@ TEST(ISAM2, swapFactors)
EXPECT(isam_check(fullgraph, fullinit, isam, *this, result_));
// Check gradient at each node
typedef ISAM2::sharedClique sharedClique;
for (const auto& [key, clique]: isam.nodes()) {
// Compute expected gradient
GaussianFactorGraph jfg;

@ -575,7 +573,6 @@ TEST(ISAM2, constrained_ordering)
EXPECT(isam_check(fullgraph, fullinit, isam, *this, result_));
// Check gradient at each node
typedef ISAM2::sharedClique sharedClique;
for (const auto& [key, clique]: isam.nodes()) {
// Compute expected gradient
GaussianFactorGraph jfg;
@ -65,9 +65,7 @@ using symbol_shorthand::L;
*/
TEST( GaussianJunctionTreeB, constructor2 ) {
// create a graph
NonlinearFactorGraph nlfg;
Values values;
std::tie(nlfg, values) = createNonlinearSmoother(7);
const auto [nlfg, values] = createNonlinearSmoother(7);
SymbolicFactorGraph::shared_ptr symbolic = nlfg.symbolic();
// linearize

@ -125,43 +123,35 @@ TEST( GaussianJunctionTreeB, constructor2 ) {
}
///* ************************************************************************* */
//TEST( GaussianJunctionTreeB, optimizeMultiFrontal )
//{
// // create a graph
// GaussianFactorGraph fg;
// Ordering ordering;
// std::tie(fg,ordering) = createSmoother(7);
//
// // optimize the graph
// GaussianJunctionTree tree(fg);
// VectorValues actual = tree.optimize(&EliminateQR);
//
// // verify
// VectorValues expected(vector<size_t>(7,2)); // expected solution
// Vector v = Vector2(0., 0.);
// for (int i=1; i<=7; i++)
// expected[ordering[X(i)]] = v;
// EXPECT(assert_equal(expected,actual));
//}
//
///* ************************************************************************* */
//TEST( GaussianJunctionTreeB, optimizeMultiFrontal2)
//{
// // create a graph
// example::Graph nlfg = createNonlinearFactorGraph();
// Values noisy = createNoisyValues();
// Ordering ordering; ordering += X(1),X(2),L(1);
// GaussianFactorGraph fg = *nlfg.linearize(noisy, ordering);
//
// // optimize the graph
// GaussianJunctionTree tree(fg);
// VectorValues actual = tree.optimize(&EliminateQR);
//
// // verify
// VectorValues expected = createCorrectDelta(ordering); // expected solution
// EXPECT(assert_equal(expected,actual));
//}
//
TEST(GaussianJunctionTreeB, OptimizeMultiFrontal) {
// create a graph
const auto fg = createSmoother(7);
// optimize the graph
const VectorValues actual = fg.optimize(&EliminateQR);
// verify
VectorValues expected;
const Vector v = Vector2(0., 0.);
for (int i = 1; i <= 7; i++) expected.emplace(X(i), v);
EXPECT(assert_equal(expected, actual));
}
/* ************************************************************************* */
TEST(GaussianJunctionTreeB, optimizeMultiFrontal2) {
// create a graph
const auto nlfg = createNonlinearFactorGraph();
const auto noisy = createNoisyValues();
const auto fg = *nlfg.linearize(noisy);
// optimize the graph
VectorValues actual = fg.optimize(&EliminateQR);
// verify
VectorValues expected = createCorrectDelta(); // expected solution
EXPECT(assert_equal(expected, actual));
}
///* ************************************************************************* */
//TEST(GaussianJunctionTreeB, slamlike) {
// Values init;
@ -737,9 +737,7 @@ TEST(GncOptimizer, setInlierCostThresholds) {
TEST(GncOptimizer, optimizeSmallPoseGraph) {
/// load small pose graph
const string filename = findExampleDataFile("w100.graph");
NonlinearFactorGraph::shared_ptr graph;
Values::shared_ptr initial;
std::tie(graph, initial) = load2D(filename);
const auto [graph, initial] = load2D(filename);
// Add a Gaussian prior on first poses
Pose2 priorMean(0.0, 0.0, 0.0); // prior at origin
SharedDiagonal priorNoise = noiseModel::Diagonal::Sigmas(
@ -63,16 +63,12 @@ std::tuple<NonlinearFactorGraph, Values> generateProblem() {
Pose2 x5(2.1, 2.1, -M_PI_2);
initialEstimate.insert(5, x5);
return std::tie(graph, initialEstimate);
return {graph, initialEstimate};
}
/* ************************************************************************* */
TEST(NonlinearConjugateGradientOptimizer, Optimize) {
NonlinearFactorGraph graph;
Values initialEstimate;
std::tie(graph, initialEstimate) = generateProblem();
const auto [graph, initialEstimate] = generateProblem();
// cout << "initial error = " << graph.error(initialEstimate) << endl;
NonlinearOptimizerParams param;
@ -59,10 +59,8 @@ TEST( Iterative, conjugateGradientDescent )
VectorValues expected = fg.optimize();
// get matrices
Matrix A;
Vector b;
Vector x0 = Z_6x1;
std::tie(A, b) = fg.jacobian();
const auto [A, b] = fg.jacobian();
Vector expectedX = (Vector(6) << -0.1, 0.1, -0.1, -0.1, 0.1, -0.2).finished();
// Do conjugate gradient descent, System version
@ -62,9 +62,7 @@ static double error(const GaussianFactorGraph& fg, const VectorValues& x) {
/* ************************************************************************* */
TEST(SubgraphPreconditioner, planarGraph) {
// Check planar graph construction
GaussianFactorGraph A;
VectorValues xtrue;
std::tie(A, xtrue) = planarGraph(3);
const auto [A, xtrue] = planarGraph(3);
LONGS_EQUAL(13, A.size());
LONGS_EQUAL(9, xtrue.size());
DOUBLES_EQUAL(0, error(A, xtrue), 1e-9); // check zero error for xtrue

@ -78,13 +76,10 @@ TEST(SubgraphPreconditioner, planarGraph) {
/* ************************************************************************* */
TEST(SubgraphPreconditioner, splitOffPlanarTree) {
// Build a planar graph
GaussianFactorGraph A;
VectorValues xtrue;
std::tie(A, xtrue) = planarGraph(3);
const auto [A, xtrue] = planarGraph(3);
// Get the spanning tree and constraints, and check their sizes
GaussianFactorGraph T, C;
std::tie(T, C) = splitOffPlanarTree(3, A);
const auto [T, C] = splitOffPlanarTree(3, A);
LONGS_EQUAL(9, T.size());
LONGS_EQUAL(4, C.size());

@ -97,14 +92,11 @@ TEST(SubgraphPreconditioner, splitOffPlanarTree) {
/* ************************************************************************* */
TEST(SubgraphPreconditioner, system) {
// Build a planar graph
GaussianFactorGraph Ab;
VectorValues xtrue;
size_t N = 3;
std::tie(Ab, xtrue) = planarGraph(N); // A*x-b
const auto [Ab, xtrue] = planarGraph(N); // A*x-b
// Get the spanning tree and remaining graph
GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2
std::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab);
auto [Ab1, Ab2] = splitOffPlanarTree(N, Ab);
// Eliminate the spanning tree to build a prior
const Ordering ord = planarOrdering(N);

@ -120,11 +112,9 @@ TEST(SubgraphPreconditioner, system) {
Ab2.add(key(1, 1), Z_2x2, Z_2x1);
Ab2.add(key(1, 2), Z_2x2, Z_2x1);
Ab2.add(key(1, 3), Z_2x2, Z_2x1);
Matrix A, A1, A2;
Vector b, b1, b2;
std::tie(A, b) = Ab.jacobian(ordering);
std::tie(A1, b1) = Ab1.jacobian(ordering);
std::tie(A2, b2) = Ab2.jacobian(ordering);
const auto [A, b] = Ab.jacobian(ordering);
const auto [A1, b1] = Ab1.jacobian(ordering);
const auto [A2, b2] = Ab2.jacobian(ordering);
Matrix R1 = Rc1.matrix(ordering).first;
Matrix Abar(13 * 2, 9 * 2);
Abar.topRows(9 * 2) = Matrix::Identity(9 * 2, 9 * 2);

@ -193,14 +183,11 @@ TEST(SubgraphPreconditioner, system) {
/* ************************************************************************* */
TEST(SubgraphPreconditioner, conjugateGradients) {
// Build a planar graph
GaussianFactorGraph Ab;
VectorValues xtrue;
size_t N = 3;
std::tie(Ab, xtrue) = planarGraph(N); // A*x-b
const auto [Ab, xtrue] = planarGraph(N); // A*x-b
// Get the spanning tree
GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2
std::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab);
const auto [Ab1, Ab2] = splitOffPlanarTree(N, Ab);
// Eliminate the spanning tree to build a prior
GaussianBayesNet Rc1 = *Ab1.eliminateSequential(); // R1*x-c1
@ -55,9 +55,7 @@ TEST( SubgraphSolver, Parameters )
TEST( SubgraphSolver, splitFactorGraph )
{
// Build a planar graph
GaussianFactorGraph Ab;
VectorValues xtrue;
std::tie(Ab, xtrue) = example::planarGraph(N); // A*x-b
const auto [Ab, xtrue] = example::planarGraph(N); // A*x-b
SubgraphBuilderParameters params;
params.augmentationFactor = 0.0;

@ -65,8 +63,7 @@ TEST( SubgraphSolver, splitFactorGraph )
auto subgraph = builder(Ab);
EXPECT_LONGS_EQUAL(9, subgraph.size());
GaussianFactorGraph Ab1, Ab2;
std::tie(Ab1, Ab2) = splitFactorGraph(Ab, subgraph);
const auto [Ab1, Ab2] = splitFactorGraph(Ab, subgraph);
EXPECT_LONGS_EQUAL(9, Ab1.size());
EXPECT_LONGS_EQUAL(13, Ab2.size());
}

@ -75,9 +72,7 @@ TEST( SubgraphSolver, splitFactorGraph )
TEST( SubgraphSolver, constructor1 )
{
// Build a planar graph
GaussianFactorGraph Ab;
VectorValues xtrue;
std::tie(Ab, xtrue) = example::planarGraph(N); // A*x-b
const auto [Ab, xtrue] = example::planarGraph(N); // A*x-b
// The first constructor just takes a factor graph (and kParameters)
// and it will split the graph into A1 and A2, where A1 is a spanning tree

@ -90,14 +85,11 @@ TEST( SubgraphSolver, constructor1 )
TEST( SubgraphSolver, constructor2 )
{
// Build a planar graph
GaussianFactorGraph Ab;
VectorValues xtrue;
size_t N = 3;
std::tie(Ab, xtrue) = example::planarGraph(N); // A*x-b
const auto [Ab, xtrue] = example::planarGraph(N); // A*x-b
// Get the spanning tree
GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2
std::tie(Ab1, Ab2) = example::splitOffPlanarTree(N, Ab);
// Get the spanning tree, A1*x-b1 and A2*x-b2
const auto [Ab1, Ab2] = example::splitOffPlanarTree(N, Ab);
// The second constructor takes two factor graphs, so the caller can specify
// the preconditioner (Ab1) and the constraints that are left out (Ab2)
@ -110,14 +102,12 @@ TEST( SubgraphSolver, constructor2 )
TEST( SubgraphSolver, constructor3 )
{
// Build a planar graph
GaussianFactorGraph Ab;
VectorValues xtrue;
size_t N = 3;
std::tie(Ab, xtrue) = example::planarGraph(N); // A*x-b
const auto [Ab, xtrue] = example::planarGraph(N); // A*x-b
// Get the spanning tree and corresponding kOrdering
GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2
std::tie(Ab1, Ab2) = example::splitOffPlanarTree(N, Ab);
// A1*x-b1 and A2*x-b2
const auto [Ab1, Ab2] = example::splitOffPlanarTree(N, Ab);
// The caller solves |A1*x-b1|^2 == |R1*x-c1|^2, where R1 is square UT
auto Rc1 = *Ab1.eliminateSequential();
@ -33,11 +33,9 @@ int main(int argc, char *argv[]) {
size_t trials = 1;
// read graph
Values::shared_ptr solution;
NonlinearFactorGraph::shared_ptr g;
string inputFile = findExampleDataFile("w10000");
auto model = noiseModel::Diagonal::Sigmas((Vector(3) << 0.05, 0.05, 5.0 * M_PI / 180.0).finished());
std::tie(g, solution) = load2D(inputFile, model);
const auto [g, solution] = load2D(inputFile, model);
// add noise to create initial estimate
Values initial;