diff --git a/gtsam/inference/BayesNet.h b/gtsam/inference/BayesNet.h
index 1b118adc7..d562f3ae6 100644
--- a/gtsam/inference/BayesNet.h
+++ b/gtsam/inference/BayesNet.h
@@ -39,7 +39,7 @@ class BayesNet : public FactorGraph<CONDITIONAL> {
       sharedConditional;  ///< A shared pointer to a conditional
 
  protected:
-  /// @name Standard Constructors
+  /// @name Protected Constructors
   /// @{
 
   /** Default constructor as an empty BayesNet */
diff --git a/gtsam/inference/FactorGraph.h b/gtsam/inference/FactorGraph.h
index 89fd09037..915ae273a 100644
--- a/gtsam/inference/FactorGraph.h
+++ b/gtsam/inference/FactorGraph.h
@@ -154,10 +154,24 @@ class FactorGraph {
   /// @}
 
  public:
+  /// @name Constructors
+  /// @{
+
   /// Default destructor
-  // Public and virtual so boost serialization can call it.
+  /// Public and virtual so boost serialization can call it.
   virtual ~FactorGraph() = default;
 
+  /**
+   * Constructor that takes an initializer list of shared pointers.
+   *  FactorGraph fg = {make_shared<MyFactor>(), ...};
+   */
+  template <class DERIVEDFACTOR, typename = IsDerived<DERIVEDFACTOR>>
+  FactorGraph(
+      std::initializer_list<boost::shared_ptr<DERIVEDFACTOR>> sharedFactors) {
+    for (auto&& f : sharedFactors) factors_.push_back(f);
+  }
+
+  /// @}
   /// @name Adding Single Factors
   /// @{
diff --git a/gtsam/linear/Errors.cpp b/gtsam/linear/Errors.cpp
index 41c6c3d09..e7d893d25 100644
--- a/gtsam/linear/Errors.cpp
+++ b/gtsam/linear/Errors.cpp
@@ -25,9 +25,6 @@ using namespace std;
 
 namespace gtsam {
 
-/* ************************************************************************* */
-Errors::Errors(){}
-
 /* ************************************************************************* */
 Errors::Errors(const VectorValues& V) {
   for(const Vector& e: V | boost::adaptors::map_values) {
diff --git a/gtsam/linear/Errors.h b/gtsam/linear/Errors.h
index f6e147084..41ba44144 100644
--- a/gtsam/linear/Errors.h
+++ b/gtsam/linear/Errors.h
@@ -31,29 +31,31 @@ namespace gtsam {
   class VectorValues;
 
   /** vector of errors */
-  class Errors : public FastList<Vector> {
+  class GTSAM_EXPORT Errors : public FastList<Vector> {
+
+    using Base = FastList<Vector>;
 
   public:
 
-    GTSAM_EXPORT Errors() ;
+    using Base::Base;  // inherit constructors
 
     /** break V into pieces according to its start indices */
-    GTSAM_EXPORT Errors(const VectorValues&V);
+    Errors(const VectorValues&V);
 
     /** print */
-    GTSAM_EXPORT void print(const std::string& s = "Errors") const;
+    void print(const std::string& s = "Errors") const;
 
     /** equals, for unit testing */
-    GTSAM_EXPORT bool equals(const Errors& expected, double tol=1e-9) const;
+    bool equals(const Errors& expected, double tol=1e-9) const;
 
     /** Addition */
-    GTSAM_EXPORT Errors operator+(const Errors& b) const;
+    Errors operator+(const Errors& b) const;
 
     /** subtraction */
-    GTSAM_EXPORT Errors operator-(const Errors& b) const;
+    Errors operator-(const Errors& b) const;
 
     /** negation */
-    GTSAM_EXPORT Errors operator-() const ;
+    Errors operator-() const ;
 
   }; // Errors
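Editorial note (not part of the patch): together, the new FactorGraph initializer-list constructor and the constructors Errors now inherits from FastList<Vector> allow container-style construction at call sites. A minimal sketch, assuming GTSAM 4.x with boost shared pointers; keys, matrices, and noise models are illustrative only:

// Sketch, not the library's own example code.
#include <gtsam/linear/Errors.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>

#include <list>

using namespace gtsam;

int main() {
  // Braced list of shared factor pointers, replacing boost::assign::list_of:
  GaussianFactorGraph fg = {boost::make_shared<JacobianFactor>(
      0, I_2x2, Vector2(1.0, 2.0), noiseModel::Unit::Create(2))};
  fg.print("fg");

  // Errors picks up FastList<Vector>'s converting constructors:
  Errors e = std::list<Vector>{Vector2(1.0, 2.0), Vector3(3.0, 4.0, 5.0)};
  e.print("e");
  return 0;
}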
diff --git a/gtsam/linear/GaussianBayesNet.h b/gtsam/linear/GaussianBayesNet.h
index c8a28e075..a63cfa6c7 100644
--- a/gtsam/linear/GaussianBayesNet.h
+++ b/gtsam/linear/GaussianBayesNet.h
@@ -65,8 +65,19 @@ namespace gtsam {
     explicit GaussianBayesNet(const FactorGraph<DERIVEDCONDITIONAL>& graph)
         : Base(graph) {}
 
+    /**
+     * Constructor that takes an initializer list of shared pointers.
+     *  BayesNet bn = {make_shared<GaussianConditional>(), ...};
+     */
+    template <class DERIVEDCONDITIONAL>
+    GaussianBayesNet(
+        std::initializer_list<boost::shared_ptr<DERIVEDCONDITIONAL> >
+            sharedConditionals) {
+      for (auto&& gc : sharedConditionals) push_back(gc);
+    }
+
     /// Destructor
-    virtual ~GaussianBayesNet() {}
+    virtual ~GaussianBayesNet() = default;
 
     /// @}
diff --git a/gtsam/linear/GaussianFactorGraph.h b/gtsam/linear/GaussianFactorGraph.h
index 0d5057aa8..9a4885689 100644
--- a/gtsam/linear/GaussianFactorGraph.h
+++ b/gtsam/linear/GaussianFactorGraph.h
@@ -74,9 +74,14 @@ namespace gtsam {
     typedef EliminateableFactorGraph<This> BaseEliminateable;  ///< Typedef to base elimination class
     typedef boost::shared_ptr<This> shared_ptr;  ///< shared_ptr to this class
 
+    /// @name Constructors
+    /// @{
+
     /** Default constructor */
     GaussianFactorGraph() {}
 
+    using Base::Base;  // Inherit constructors
+
     /** Construct from iterator over factors */
     template<typename ITERATOR>
     GaussianFactorGraph(ITERATOR firstFactor, ITERATOR lastFactor) : Base(firstFactor, lastFactor) {}
@@ -92,6 +97,7 @@ namespace gtsam {
 
     /** Virtual destructor */
     virtual ~GaussianFactorGraph() {}
+    /// @}
 
     /// @name Testable
     /// @{
diff --git a/gtsam/linear/tests/testErrors.cpp b/gtsam/linear/tests/testErrors.cpp
index 507254a6c..415cd9496 100644
--- a/gtsam/linear/tests/testErrors.cpp
+++ b/gtsam/linear/tests/testErrors.cpp
@@ -24,11 +24,12 @@ using namespace gtsam;
 
 /* ************************************************************************* */
 TEST(Errors, arithmetic) {
-  Errors e{Vector2(1.0, 2.0), Vector3(3.0, 4.0, 5.0)};
+  Errors e = std::list<Vector>{Vector2(1.0, 2.0), Vector3(3.0, 4.0, 5.0)};
   DOUBLES_EQUAL(1 + 4 + 9 + 16 + 25, dot(e, e), 1e-9);
 
   axpy(2.0, e, e);
-  const Errors expected{Vector2(3.0, 6.0), Vector3(9.0, 12.0, 15.0)};
+  const Errors expected =
+      std::list<Vector>{Vector2(3.0, 6.0), Vector3(9.0, 12.0, 15.0)};
   CHECK(assert_equal(expected, e));
 }
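Editorial note (not part of the patch): the new GaussianBayesNet constructor makes the static fixtures in the tests below read declaratively. A sketch mirroring the smallBayesNet fixture:

// Sketch, not the library's own example code.
#include <gtsam/linear/GaussianBayesNet.h>

using namespace gtsam;

int main() {
  const Key x = 11, y = 22;
  // p(x | y) and p(y), built directly from an initializer list:
  const GaussianBayesNet bn = {
      boost::make_shared<GaussianConditional>(x, Vector1::Constant(9), I_1x1,
                                              y, I_1x1),
      boost::make_shared<GaussianConditional>(y, Vector1::Constant(5), I_1x1)};
  const VectorValues solution = bn.optimize();  // x = 4, y = 5
  solution.print("solution");
  return 0;
}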
diff --git a/gtsam/linear/tests/testGaussianBayesNet.cpp b/gtsam/linear/tests/testGaussianBayesNet.cpp
index 4113c3efd..7b55d00c3 100644
--- a/gtsam/linear/tests/testGaussianBayesNet.cpp
+++ b/gtsam/linear/tests/testGaussianBayesNet.cpp
@@ -38,15 +38,15 @@ using symbol_shorthand::X;
 
 static const Key _x_ = 11, _y_ = 22, _z_ = 33;
 
-static GaussianBayesNet smallBayesNet =
-    list_of(GaussianConditional(_x_, Vector1::Constant(9), I_1x1, _y_, I_1x1))(
-        GaussianConditional(_y_, Vector1::Constant(5), I_1x1));
+static GaussianBayesNet smallBayesNet = {
+    boost::make_shared<GaussianConditional>(_x_, Vector1::Constant(9), I_1x1, _y_, I_1x1),
+    boost::make_shared<GaussianConditional>(_y_, Vector1::Constant(5), I_1x1)};
 
-static GaussianBayesNet noisyBayesNet =
-    list_of(GaussianConditional(_x_, Vector1::Constant(9), I_1x1, _y_, I_1x1,
-                                noiseModel::Isotropic::Sigma(1, 2.0)))(
-        GaussianConditional(_y_, Vector1::Constant(5), I_1x1,
-                            noiseModel::Isotropic::Sigma(1, 3.0)));
+static GaussianBayesNet noisyBayesNet = {
+    boost::make_shared<GaussianConditional>(_x_, Vector1::Constant(9), I_1x1, _y_, I_1x1,
+                                            noiseModel::Isotropic::Sigma(1, 2.0)),
+    boost::make_shared<GaussianConditional>(_y_, Vector1::Constant(5), I_1x1,
+                                            noiseModel::Isotropic::Sigma(1, 3.0))};
 
 /* ************************************************************************* */
 TEST( GaussianBayesNet, Matrix )
@@ -112,11 +112,11 @@ TEST( GaussianBayesNet, NoisyMatrix )
 
 /* ************************************************************************* */
 TEST(GaussianBayesNet, Optimize) {
-  VectorValues expected =
-      map_list_of(_x_, Vector1::Constant(4))(_y_, Vector1::Constant(5));
-  VectorValues actual = smallBayesNet.optimize();
+  const VectorValues expected{{_x_, Vector1::Constant(4)},
+                              {_y_, Vector1::Constant(5)}};
+  const VectorValues actual = smallBayesNet.optimize();
   EXPECT(assert_equal(expected, actual));
-}
+ }
 
/* ************************************************************************* */
TEST(GaussianBayesNet, NoisyOptimize) {
@@ -124,7 +124,7 @@ TEST(GaussianBayesNet, NoisyOptimize) {
   Vector d;
   boost::tie(R, d) = noisyBayesNet.matrix();  // find matrix and RHS
   const Vector x = R.inverse() * d;
-  VectorValues expected = map_list_of(_x_, x.head(1))(_y_, x.tail(1));
+  const VectorValues expected{{_x_, x.head(1)}, {_y_, x.tail(1)}};
 
   VectorValues actual = noisyBayesNet.optimize();
   EXPECT(assert_equal(expected, actual));
@@ -133,17 +133,16 @@ TEST(GaussianBayesNet, NoisyOptimize) {
 /* ************************************************************************* */
 TEST( GaussianBayesNet, optimizeIncomplete )
 {
-  static GaussianBayesNet incompleteBayesNet = list_of
-    (GaussianConditional(_x_, Vector1::Constant(9), I_1x1, _y_, I_1x1));
+  static GaussianBayesNet incompleteBayesNet;
+  incompleteBayesNet.emplace_shared<GaussianConditional>(
+      _x_, Vector1::Constant(9), I_1x1, _y_, I_1x1);
 
-  VectorValues solutionForMissing = map_list_of
-    (_y_, Vector1::Constant(5));
+  VectorValues solutionForMissing { {_y_, Vector1::Constant(5)} };
 
   VectorValues actual = incompleteBayesNet.optimize(solutionForMissing);
 
-  VectorValues expected = map_list_of
-    (_x_, Vector1::Constant(4))
-    (_y_, Vector1::Constant(5));
+  VectorValues expected{{_x_, Vector1::Constant(4)},
+                        {_y_, Vector1::Constant(5)}};
 
   EXPECT(assert_equal(expected,actual));
 }
@@ -156,14 +155,11 @@ TEST( GaussianBayesNet, optimize3 )
   //   5     1   5
   // NOTE: we are supplying a new RHS here
 
-  VectorValues expected = map_list_of
-    (_x_, Vector1::Constant(-1))
-    (_y_, Vector1::Constant(5));
+  VectorValues expected { {_x_, Vector1::Constant(-1)},
+                          {_y_, Vector1::Constant(5)} };
 
   // Test different RHS version
-  VectorValues gx = map_list_of
-    (_x_, Vector1::Constant(4))
-    (_y_, Vector1::Constant(5));
+  VectorValues gx{{_x_, Vector1::Constant(4)}, {_y_, Vector1::Constant(5)}};
   VectorValues actual = smallBayesNet.backSubstitute(gx);
   EXPECT(assert_equal(expected, actual));
 }
@@ -173,9 +169,9 @@ namespace sampling {
 static Matrix A1 = (Matrix(2, 2) << 1., 2., 3., 4.).finished();
 static const Vector2 mean(20, 40), b(10, 10);
 static const double sigma = 0.01;
-static const GaussianBayesNet gbn =
-    list_of(GaussianConditional::FromMeanAndStddev(X(0), A1, X(1), b, sigma))(
-        GaussianDensity::FromMeanAndStddev(X(1), mean, sigma));
+static const GaussianBayesNet gbn = {
+    GaussianConditional::sharedMeanAndStddev(X(0), A1, X(1), b, sigma),
+    GaussianDensity::sharedMeanAndStddev(X(1), mean, sigma)};
 }  // namespace sampling
 
 /* ************************************************************************* */
@@ -250,13 +246,9 @@ TEST( GaussianBayesNet, backSubstituteTranspose )
   // x=R'*y, expected=inv(R')*x
   // 2 = 1    2
   // 5   1 1  3
-  VectorValues
-    x = map_list_of
-      (_x_, Vector1::Constant(2))
-      (_y_, Vector1::Constant(5)),
-    expected = map_list_of
-      (_x_, Vector1::Constant(2))
-      (_y_, Vector1::Constant(3));
+  const VectorValues x{{_x_, Vector1::Constant(2)},
+                       {_y_, Vector1::Constant(5)}},
+      expected{{_x_, Vector1::Constant(2)}, {_y_, Vector1::Constant(3)}};
 
   VectorValues actual = smallBayesNet.backSubstituteTranspose(x);
   EXPECT(assert_equal(expected, actual));
@@ -273,13 +265,8 @@ TEST( GaussianBayesNet, backSubstituteTransposeNoisy )
   // x=R'*y, expected=inv(R')*x
   // 2 = 1    2
   // 5   1 1  3
-  VectorValues
-    x = map_list_of
-      (_x_, Vector1::Constant(2))
-      (_y_, Vector1::Constant(5)),
-    expected = map_list_of
-      (_x_, Vector1::Constant(4))
-      (_y_, Vector1::Constant(9));
+  VectorValues x{{_x_, Vector1::Constant(2)}, {_y_, Vector1::Constant(5)}},
+      expected{{_x_, Vector1::Constant(4)}, {_y_, Vector1::Constant(9)}};
 
   VectorValues actual = noisyBayesNet.backSubstituteTranspose(x);
   EXPECT(assert_equal(expected, actual));
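Editorial note (not part of the patch): VectorValues is likewise constructible from braced (Key, Vector) pairs, which is what replaces map_list_of and pair_list_of throughout these tests. A minimal sketch with illustrative keys:

// Sketch, not the library's own example code.
#include <gtsam/linear/VectorValues.h>

using namespace gtsam;

int main() {
  const VectorValues values{{11, Vector1::Constant(4)},
                            {22, Vector1::Constant(5)}};
  values.print("values");
  return 0;
}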
diff --git a/gtsam/linear/tests/testGaussianBayesTree.cpp b/gtsam/linear/tests/testGaussianBayesTree.cpp
index 18b4674b5..ba499ea24 100644
--- a/gtsam/linear/tests/testGaussianBayesTree.cpp
+++ b/gtsam/linear/tests/testGaussianBayesTree.cpp
@@ -23,43 +23,45 @@
 #include <gtsam/base/numericalDerivative.h>
 #include <gtsam/linear/GaussianBayesTree.h>
-#include <boost/assign/list_of.hpp>
-#include <boost/assign/std/list.hpp>  // for operator +=
-#include <boost/assign/std/vector.hpp>  // for operator +=
 #include <CppUnitLite/TestHarness.h>
+#include <vector>
 
-using namespace boost::assign;
 using namespace std::placeholders;
 using namespace std;
 using namespace gtsam;
 
+using Pairs = std::vector<std::pair<Key, Matrix>>;
+
 namespace {
   const Key x1=1, x2=2, x3=3, x4=4;
   const SharedDiagonal chainNoise = noiseModel::Isotropic::Sigma(1, 0.5);
-  const GaussianFactorGraph chain = list_of
-    (JacobianFactor(x2, (Matrix(1, 1) << 1.).finished(), x1, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise))
-    (JacobianFactor(x2, (Matrix(1, 1) << 1.).finished(), x3, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise))
-    (JacobianFactor(x3, (Matrix(1, 1) << 1.).finished(), x4, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise))
-    (JacobianFactor(x4, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise));
-  const Ordering chainOrdering = Ordering(list_of(x2)(x1)(x3)(x4));
+  const GaussianFactorGraph chain = {
+      boost::make_shared<JacobianFactor>(
+          x2, I_1x1, x1, I_1x1, (Vector(1) << 1.).finished(), chainNoise),
+      boost::make_shared<JacobianFactor>(
+          x2, I_1x1, x3, I_1x1, (Vector(1) << 1.).finished(), chainNoise),
+      boost::make_shared<JacobianFactor>(
+          x3, I_1x1, x4, I_1x1, (Vector(1) << 1.).finished(), chainNoise),
+      boost::make_shared<JacobianFactor>(
+          x4, I_1x1, (Vector(1) << 1.).finished(), chainNoise)};
+  const Ordering chainOrdering {x2, x1, x3, x4};
 
   /* ************************************************************************* */
   // Helper functions for below
-  GaussianBayesTreeClique::shared_ptr MakeClique(const GaussianConditional& conditional)
+  GaussianBayesTreeClique::shared_ptr LeafClique(const GaussianConditional& conditional)
   {
     return boost::make_shared<GaussianBayesTreeClique>(
         boost::make_shared<GaussianConditional>(conditional));
   }
 
-  template<typename CHILDREN>
+  typedef std::vector<GaussianBayesTreeClique::shared_ptr> Children;
+
   GaussianBayesTreeClique::shared_ptr MakeClique(
-    const GaussianConditional& conditional, const CHILDREN& children)
+    const GaussianConditional& conditional, const Children& children)
   {
-    GaussianBayesTreeClique::shared_ptr clique =
-        boost::make_shared<GaussianBayesTreeClique>(
-            boost::make_shared<GaussianConditional>(conditional));
+    auto clique = LeafClique(conditional);
     clique->children.assign(children.begin(), children.end());
-    for(typename CHILDREN::const_iterator child = children.begin(); child != children.end(); ++child)
+    for(Children::const_iterator child = children.begin(); child != children.end(); ++child)
       (*child)->parent_ = clique;
     return clique;
   }
@@ -90,23 +92,25 @@ TEST( GaussianBayesTree, eliminate )
   EXPECT_LONGS_EQUAL(3, scatter.at(2).key);
   EXPECT_LONGS_EQUAL(4, scatter.at(3).key);
 
-  Matrix two = (Matrix(1, 1) << 2.).finished();
-  Matrix one = (Matrix(1, 1) << 1.).finished();
+  const Matrix two = (Matrix(1, 1) << 2.).finished();
+  const Matrix one = I_1x1;
+
+  const GaussianConditional gc1(
+      std::map<Key, Matrix>{
+          {x3, (Matrix21() << 2., 0.).finished()},
+          {x4, (Matrix21() << 2., 2.).finished()},
+      },
+      2, Vector2(2., 2.));
+  const GaussianConditional gc2(
+      Pairs{
+          {x2, (Matrix21() << -2. * sqrt(2.), 0.).finished()},
+          {x1, (Matrix21() << -sqrt(2.), -sqrt(2.)).finished()},
+          {x3, (Matrix21() << -sqrt(2.), sqrt(2.)).finished()},
+      },
+      2, (Vector(2) << -2. * sqrt(2.), 0.).finished());
 
   GaussianBayesTree bayesTree_expected;
-  bayesTree_expected.insertRoot(
-      MakeClique(
-          GaussianConditional(
-              pair_list_of(x3, (Matrix21() << 2., 0.).finished())(
-                  x4, (Matrix21() << 2., 2.).finished()), 2, Vector2(2., 2.)),
-          list_of(
-              MakeClique(
-                  GaussianConditional(
-                      pair_list_of(x2,
-                          (Matrix21() << -2. * sqrt(2.), 0.).finished())(x1,
-                          (Matrix21() << -sqrt(2.), -sqrt(2.)).finished())(x3,
-                          (Matrix21() << -sqrt(2.), sqrt(2.)).finished()), 2,
-                      (Vector(2) << -2. * sqrt(2.), 0.).finished())))));
+  bayesTree_expected.insertRoot(MakeClique(gc1, Children{LeafClique(gc2)}));
 
   EXPECT(assert_equal(bayesTree_expected, bt));
 }
@@ -114,11 +118,10 @@
 /* ************************************************************************* */
 TEST( GaussianBayesTree, optimizeMultiFrontal )
 {
-  VectorValues expected = pair_list_of
-    (x1, (Vector(1) << 0.).finished())
-    (x2, (Vector(1) << 1.).finished())
-    (x3, (Vector(1) << 0.).finished())
-    (x4, (Vector(1) << 1.).finished());
+  VectorValues expected = {{x1, (Vector(1) << 0.).finished()},
+                           {x2, (Vector(1) << 1.).finished()},
+                           {x3, (Vector(1) << 0.).finished()},
+                           {x4, (Vector(1) << 1.).finished()}};
 
   VectorValues actual = chain.eliminateMultifrontal(chainOrdering)->optimize();
   EXPECT(assert_equal(expected,actual));
@@ -126,40 +129,61 @@ TEST( GaussianBayesTree, optimizeMultiFrontal )
 
 /* ************************************************************************* */
 TEST(GaussianBayesTree, complicatedMarginal) {
-  // Create the conditionals to go in the BayesTree
+  const GaussianConditional gc1(
+      Pairs{{11, (Matrix(3, 1) << 0.0971, 0, 0).finished()},
+            {12, (Matrix(3, 2) << 0.3171, 0.4387, 0.9502, 0.3816, 0, 0.7655)
+                     .finished()}},
+      2, Vector3(0.2638, 0.1455, 0.1361));
+  const GaussianConditional gc2(
+      Pairs{
+          {9, (Matrix(3, 1) << 0.7952, 0, 0).finished()},
+          {10, (Matrix(3, 2) << 0.4456, 0.7547, 0.6463, 0.2760, 0, 0.6797)
+                   .finished()},
+          {11, (Matrix(3, 1) << 0.6551, 0.1626, 0.1190).finished()},
+          {12, (Matrix(3, 2) << 0.4984, 0.5853, 0.9597, 0.2238, 0.3404, 0.7513)
+                   .finished()}},
+      2, Vector3(0.4314, 0.9106, 0.1818));
+  const GaussianConditional gc3(
+      Pairs{{7, (Matrix(3, 1) << 0.2551, 0, 0).finished()},
+            {8, (Matrix(3, 2) << 0.8909, 0.1386, 0.9593, 0.1493, 0, 0.2575)
+                     .finished()},
+            {11, (Matrix(3, 1) << 0.8407, 0.2543, 0.8143).finished()}},
+      2, Vector3(0.3998, 0.2599, 0.8001));
+  const GaussianConditional gc4(
+      Pairs{{5, (Matrix(3, 1) << 0.2435, 0, 0).finished()},
+            {6, (Matrix(3, 2) << 0.4733, 0.1966, 0.3517, 0.2511, 0.8308, 0.0)
+                     .finished()},
+            // NOTE the non-upper-triangular form
+            // here since this test was written when we had column permutations
+            // from LDL. The code still works currently (does not enforce
+            // upper-triangularity in this case) but this test will need to be
+            // redone if this stops working in the future
+            {7, (Matrix(3, 1) << 0.5853, 0.5497, 0.9172).finished()},
+            {8, (Matrix(3, 2) << 0.2858, 0.3804, 0.7572, 0.5678, 0.7537, 0.0759)
+                     .finished()}},
+      2, Vector3(0.8173, 0.8687, 0.0844));
+  const GaussianConditional gc5(
+      Pairs{{3, (Matrix(3, 1) << 0.0540, 0, 0).finished()},
+            {4, (Matrix(3, 2) << 0.9340, 0.4694, 0.1299, 0.0119, 0, 0.3371)
+                     .finished()},
+            {6, (Matrix(3, 2) << 0.1622, 0.5285, 0.7943, 0.1656, 0.3112, 0.6020)
+                     .finished()}},
+      2, Vector3(0.9619, 0.0046, 0.7749));
+  const GaussianConditional gc6(
+      Pairs{{1, (Matrix(3, 1) << 0.2630, 0, 0).finished()},
+            {2, (Matrix(3, 2) << 0.7482, 0.2290, 0.4505, 0.9133, 0, 0.1524)
+                     .finished()},
+            {5, (Matrix(3, 1) << 0.8258, 0.5383, 0.9961).finished()}},
+      2, Vector3(0.0782, 0.4427, 0.1067));
+
+  // Create the bayes tree:
   GaussianBayesTree bt;
-  bt.insertRoot(
-    MakeClique(GaussianConditional(pair_list_of (11, (Matrix(3,1) << 0.0971, 0, 0).finished())
-                                                (12, (Matrix(3,2) << 0.3171, 0.4387, 0.9502, 0.3816, 0, 0.7655).finished()),
-                                   2, Vector3(0.2638, 0.1455, 0.1361)), list_of
-      (MakeClique(GaussianConditional(pair_list_of (9, (Matrix(3,1) << 0.7952, 0, 0).finished())
-                                                   (10, (Matrix(3,2) << 0.4456, 0.7547, 0.6463, 0.2760, 0, 0.6797).finished())
-                                                   (11, (Matrix(3,1) << 0.6551, 0.1626, 0.1190).finished())
-                                                   (12, (Matrix(3,2) << 0.4984, 0.5853, 0.9597, 0.2238, 0.3404, 0.7513).finished()),
-                                      2, Vector3(0.4314, 0.9106, 0.1818))))
-      (MakeClique(GaussianConditional(pair_list_of (7, (Matrix(3,1) << 0.2551, 0, 0).finished())
-                                                   (8, (Matrix(3,2) << 0.8909, 0.1386, 0.9593, 0.1493, 0, 0.2575).finished())
-                                                   (11, (Matrix(3,1) << 0.8407, 0.2543, 0.8143).finished()),
-                                      2, Vector3(0.3998, 0.2599, 0.8001)), list_of
-        (MakeClique(GaussianConditional(pair_list_of (5, (Matrix(3,1) << 0.2435, 0, 0).finished())
-                                                     (6, (Matrix(3,2) << 0.4733, 0.1966, 0.3517, 0.2511, 0.8308, 0.0).finished())
-                                                     // NOTE the non-upper-triangular form
-                                                     // here since this test was written when we had column permutations
-                                                     // from LDL. The code still works currently (does not enfore
-                                                     // upper-triangularity in this case) but this test will need to be
-                                                     // redone if this stops working in the future
-                                                     (7, (Matrix(3,1) << 0.5853, 0.5497, 0.9172).finished())
-                                                     (8, (Matrix(3,2) << 0.2858, 0.3804, 0.7572, 0.5678, 0.7537, 0.0759).finished()),
-                                        2, Vector3(0.8173, 0.8687, 0.0844)), list_of
-          (MakeClique(GaussianConditional(pair_list_of (3, (Matrix(3,1) << 0.0540, 0, 0).finished())
-                                                       (4, (Matrix(3,2) << 0.9340, 0.4694, 0.1299, 0.0119, 0, 0.3371).finished())
-                                                       (6, (Matrix(3,2) << 0.1622, 0.5285, 0.7943, 0.1656, 0.3112, 0.6020).finished()),
-                                          2, Vector3(0.9619, 0.0046, 0.7749))))
-          (MakeClique(GaussianConditional(pair_list_of (1, (Matrix(3,1) << 0.2630, 0, 0).finished())
-                                                       (2, (Matrix(3,2) << 0.7482, 0.2290, 0.4505, 0.9133, 0, 0.1524).finished())
-                                                       (5, (Matrix(3,1) << 0.8258, 0.5383, 0.9961).finished()),
-                                          2, Vector3(0.0782, 0.4427, 0.1067))))))))));
+  bt.insertRoot(MakeClique(
+      gc1, Children{LeafClique(gc2),
+                    MakeClique(gc3, Children{MakeClique(
+                                        gc4, Children{LeafClique(gc5),
+                                                      LeafClique(gc6)})})}));
 
   // Marginal on 5
   Matrix expectedCov = (Matrix(1,1) << 236.5166).finished();
@@ -201,61 +225,33 @@ double computeError(const GaussianBayesTree& gbt, const Vector10& values) {
 
 /* ************************************************************************* */
 TEST(GaussianBayesTree, ComputeSteepestDescentPointBT) {
+  const GaussianConditional gc1(
+      Pairs{{2, (Matrix(6, 2) << 31.0, 32.0, 0.0, 34.0, 0.0, 0.0, 0.0, 0.0, 0.0,
+                 0.0, 0.0, 0.0)
+                    .finished()},
+            {3, (Matrix(6, 2) << 35.0, 36.0, 37.0, 38.0, 41.0, 42.0, 0.0, 44.0,
+                 0.0, 0.0, 0.0, 0.0)
+                    .finished()},
+            {4, (Matrix(6, 2) << 0.0, 0.0, 0.0, 0.0, 45.0, 46.0, 47.0, 48.0,
+                 51.0, 52.0, 0.0, 54.0)
+                    .finished()}},
+      3, (Vector(6) << 29.0, 30.0, 39.0, 40.0, 49.0, 50.0).finished()),
+      gc2(Pairs{{0, (Matrix(4, 2) << 3.0, 4.0, 0.0, 6.0, 0.0, 0.0, 0.0, 0.0)
+                        .finished()},
+                {1, (Matrix(4, 2) << 0.0, 0.0, 0.0, 0.0, 17.0, 18.0, 0.0, 20.0)
+                        .finished()},
+                {2, (Matrix(4, 2) << 0.0, 0.0, 0.0, 0.0, 21.0, 22.0, 23.0, 24.0)
+                        .finished()},
+                {3, (Matrix(4, 2) << 7.0, 8.0, 9.0, 10.0, 0.0, 0.0, 0.0, 0.0)
+                        .finished()},
+                {4, (Matrix(4, 2) << 11.0, 12.0, 13.0, 14.0, 25.0, 26.0, 27.0,
+                     28.0)
+                        .finished()}},
+          2, (Vector(4) << 1.0, 2.0, 15.0, 16.0).finished());
 
   // Create an arbitrary Bayes Tree
   GaussianBayesTree bt;
-  bt.insertRoot(MakeClique(GaussianConditional(
-      pair_list_of
-      (2, (Matrix(6, 2) <<
-          31.0,32.0,
-          0.0,34.0,
-          0.0,0.0,
-          0.0,0.0,
-          0.0,0.0,
-          0.0,0.0).finished())
-      (3, (Matrix(6, 2) <<
-          35.0,36.0,
-          37.0,38.0,
-          41.0,42.0,
-          0.0,44.0,
-          0.0,0.0,
-          0.0,0.0).finished())
-      (4, (Matrix(6, 2) <<
-          0.0,0.0,
-          0.0,0.0,
-          45.0,46.0,
-          47.0,48.0,
-          51.0,52.0,
-          0.0,54.0).finished()),
-      3, (Vector(6) << 29.0,30.0,39.0,40.0,49.0,50.0).finished()), list_of
-      (MakeClique(GaussianConditional(
-          pair_list_of
-          (0, (Matrix(4, 2) <<
-              3.0,4.0,
-              0.0,6.0,
-              0.0,0.0,
-              0.0,0.0).finished())
-          (1, (Matrix(4, 2) <<
-              0.0,0.0,
-              0.0,0.0,
-              17.0,18.0,
-              0.0,20.0).finished())
-          (2, (Matrix(4, 2) <<
-              0.0,0.0,
-              0.0,0.0,
-              21.0,22.0,
-              23.0,24.0).finished())
-          (3, (Matrix(4, 2) <<
-              7.0,8.0,
-              9.0,10.0,
-              0.0,0.0,
-              0.0,0.0).finished())
-          (4, (Matrix(4, 2) <<
-              11.0,12.0,
-              13.0,14.0,
-              25.0,26.0,
-              27.0,28.0).finished()),
-          2, (Vector(4) << 1.0,2.0,15.0,16.0).finished())))));
+  bt.insertRoot(MakeClique(gc1, Children{LeafClique(gc2)}));
 
   // Compute the Hessian numerically
   Matrix hessian = numericalHessian<Vector10>(
       std::bind(&computeError, bt, std::placeholders::_1), Vector10::Zero());
@@ -276,12 +272,11 @@ TEST(GaussianBayesTree, ComputeSteepestDescentPointBT) {
   Vector expected = gradient * step;
 
   // Known steepest descent point from Bayes' net version
-  VectorValues expectedFromBN = pair_list_of
-    (0, Vector2(0.000129034, 0.000688183))
-    (1, Vector2(0.0109679, 0.0253767))
-    (2, Vector2(0.0680441, 0.114496))
-    (3, Vector2(0.16125, 0.241294))
-    (4, Vector2(0.300134, 0.423233));
+  VectorValues expectedFromBN{{0, Vector2(0.000129034, 0.000688183)},
+                              {1, Vector2(0.0109679, 0.0253767)},
+                              {2, Vector2(0.0680441, 0.114496)},
+                              {3, Vector2(0.16125, 0.241294)},
+                              {4, Vector2(0.300134, 0.423233)}};
 
   // Compute the steepest descent point with the dogleg function
   VectorValues actual = bt.optimizeGradientSearch();
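Editorial note (not part of the patch): Ordering now takes a braced key list, so elimination calls can be written inline. A sketch of the pattern used by the chain fixture above; keys and coefficients are illustrative:

// Sketch, not the library's own example code.
#include <gtsam/linear/GaussianBayesTree.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>

using namespace gtsam;

int main() {
  const Key x1 = 1, x2 = 2;
  const auto noise = noiseModel::Isotropic::Sigma(1, 0.5);
  const GaussianFactorGraph chain = {
      boost::make_shared<JacobianFactor>(x2, I_1x1, x1, I_1x1,
                                         (Vector(1) << 1.).finished(), noise),
      boost::make_shared<JacobianFactor>(x2, I_1x1,
                                         (Vector(1) << 1.).finished(), noise)};
  const Ordering ordering{x2, x1};
  const auto bayesTree = chain.eliminateMultifrontal(ordering);
  bayesTree->print("bt");
  return 0;
}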
diff --git a/gtsam/linear/tests/testGaussianConditional.cpp b/gtsam/linear/tests/testGaussianConditional.cpp
index 36f7eb72d..7b5677560 100644
--- a/gtsam/linear/tests/testGaussianConditional.cpp
+++ b/gtsam/linear/tests/testGaussianConditional.cpp
@@ -44,6 +44,8 @@ static Matrix R = (Matrix(2, 2) <<
     -12.1244,  -5.1962,
           0.,   4.6904).finished();
 
+using Dims = std::vector<size_t>;
+
 /* ************************************************************************* */
 TEST(GaussianConditional, constructor)
 {
@@ -215,7 +217,7 @@ TEST( GaussianConditional, solve )
   Vector sx1(2); sx1 << 1.0, 1.0;
   Vector sl1(2); sl1 << 1.0, 1.0;
 
-  VectorValues expected = {{1, expectedX} {2, sx1} {10, sl1}};
+  VectorValues expected = {{1, expectedX}, {2, sx1}, {10, sl1}};
 
   VectorValues solution = {{2, sx1},  // parents
                           {10, sl1}};
@@ -228,7 +230,7 @@ TEST( GaussianConditional, solve_simple )
 {
   // 2 variables, frontal has dim=4
-  VerticalBlockMatrix blockMatrix(list_of(4)(2)(1), 4);
+  VerticalBlockMatrix blockMatrix(Dims{4, 2, 1}, 4);
   blockMatrix.matrix() <<
       1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.1,
       0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.2,
       0.0, 0.0, 3.0, 0.0, 4.0, 0.0, 0.3,
      0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.4;
 
   // solve system as a non-multifrontal version first
-  GaussianConditional cg(list_of(1)(2), 1, blockMatrix);
+  GaussianConditional cg(KeyVector{1,2}, 1, blockMatrix);
 
   // partial solution
   Vector sx1 = Vector2(9.0, 10.0);
@@ -260,7 +262,7 @@ TEST( GaussianConditional, solve_multifrontal )
 {
   // create full system, 3 variables, 2 frontals, all 2 dim
-  VerticalBlockMatrix blockMatrix(list_of(2)(2)(2)(1), 4);
+  VerticalBlockMatrix blockMatrix(Dims{2, 2, 2, 1}, 4);
   blockMatrix.matrix() <<
       1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.1,
       0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.2,
      0.0, 0.0, 3.0, 0.0, 4.0, 0.0, 0.3,
      0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.4;
 
   // 3 variables, all dim=2
-  GaussianConditional cg(list_of(1)(2)(10), 2, blockMatrix);
+  GaussianConditional cg(KeyVector{1, 2, 10}, 2, blockMatrix);
 
   EXPECT(assert_equal(Vector(blockMatrix.full().rightCols(1)), cg.d()));
@@ -305,9 +307,9 @@ TEST( GaussianConditional, solveTranspose ) {
   d2(0) = 5;
 
   // define nodes and specify in reverse topological sort (i.e. parents last)
-  GaussianBayesNet cbn = list_of
-    (GaussianConditional(1, d1, R11, 2, S12))
-    (GaussianConditional(1, d2, R22));
+  GaussianBayesNet cbn;
+  cbn.emplace_shared<GaussianConditional>(1, d1, R11, 2, S12);
+  cbn.emplace_shared<GaussianConditional>(1, d2, R22);
 
   // x=R'*y, y=inv(R')*x
   // 2 = 1 2
@@ -375,7 +377,7 @@ TEST(GaussianConditional, FromMeanAndStddev) {
   const Vector2 b(20, 40), x0(1, 2), x1(3, 4), x2(5, 6);
   const double sigma = 3;
 
-  VectorValues values = {{X(0), x0}, {X(1), x1}, {X(2), x2};
+  VectorValues values{{X(0), x0}, {X(1), x1}, {X(2), x2}};
 
   auto conditional1 =
       GaussianConditional::FromMeanAndStddev(X(0), A1, X(1), b, sigma);
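Editorial note (not part of the patch): where a graph or Bayes net is built incrementally rather than from a braced list, emplace_shared<T>(args...) constructs the new element in place, replacing the removed list_of chains. A sketch with illustrative keys and values:

// Sketch, not the library's own example code.
#include <gtsam/linear/GaussianBayesNet.h>

using namespace gtsam;

int main() {
  GaussianBayesNet cbn;
  cbn.emplace_shared<GaussianConditional>(1, Vector1::Constant(9), I_1x1,
                                          2, I_1x1);
  cbn.emplace_shared<GaussianConditional>(2, Vector1::Constant(5), I_1x1);
  cbn.print("cbn");
  return 0;
}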
diff --git a/gtsam/linear/tests/testGaussianFactorGraph.cpp b/gtsam/linear/tests/testGaussianFactorGraph.cpp
index 221f49956..ca9b31a1b 100644
--- a/gtsam/linear/tests/testGaussianFactorGraph.cpp
+++ b/gtsam/linear/tests/testGaussianFactorGraph.cpp
@@ -208,8 +208,9 @@ TEST(GaussianFactorGraph, gradient) {
   // 2*f(x) = 100*(x1+c[X(1)])^2 + 100*(x2-x1-[0.2;-0.1])^2 + 25*(l1-x1-[0.0;0.2])^2 +
   // 25*(l1-x2-[-0.2;0.3])^2
   // worked out: df/dx1 = 100*[0.1;0.1] + 100*[0.2;-0.1]) + 25*[0.0;0.2] = [10+20;10-10+5] = [30;5]
-  VectorValues expected = map_list_of(1, Vector2(5.0, -12.5))(2, Vector2(30.0, 5.0))(
-      0, Vector2(-25.0, 17.5));
+  VectorValues expected{{1, Vector2(5.0, -12.5)},
+                        {2, Vector2(30.0, 5.0)},
+                        {0, Vector2(-25.0, 17.5)}};
 
   // Check the gradient at delta=0
   VectorValues zero = VectorValues::Zero(expected);
@@ -221,14 +222,14 @@ TEST(GaussianFactorGraph, gradient) {
   VectorValues solution = fg.optimize();
   VectorValues actual2 = fg.gradient(solution);
   EXPECT(assert_equal(VectorValues::Zero(solution), actual2));
-}
+ }
 
 /* ************************************************************************* */
 TEST(GaussianFactorGraph, transposeMultiplication) {
   GaussianFactorGraph A = createSimpleGaussianFactorGraph();
 
-  Errors e;
-  e += Vector2(0.0, 0.0), Vector2(15.0, 0.0), Vector2(0.0, -5.0), Vector2(-7.5, -5.0);
+  Errors e = std::list<Vector>{Vector2(0.0, 0.0), Vector2(15.0, 0.0),
+                               Vector2(0.0, -5.0), Vector2(-7.5, -5.0)};
 
   VectorValues expected;
   expected.insert(1, Vector2(-37.5, -50.0));
@@ -284,7 +285,7 @@ TEST(GaussianFactorGraph, matrices2) {
 TEST(GaussianFactorGraph, multiplyHessianAdd) {
   GaussianFactorGraph gfg = createSimpleGaussianFactorGraph();
 
-  VectorValues x = map_list_of(0, Vector2(1, 2))(1, Vector2(3, 4))(2, Vector2(5, 6));
+  const VectorValues x{{0, Vector2(1, 2)}, {1, Vector2(3, 4)}, {2, Vector2(5, 6)}};
 
   VectorValues expected;
   expected.insert(0, Vector2(-450, -450));
@@ -303,8 +304,8 @@ TEST(GaussianFactorGraph, multiplyHessianAdd) {
 /* ************************************************************************* */
 static GaussianFactorGraph createGaussianFactorGraphWithHessianFactor() {
   GaussianFactorGraph gfg = createSimpleGaussianFactorGraph();
-  gfg += HessianFactor(1, 2, 100*I_2x2, Z_2x2, Vector2(0.0, 1.0),
-                       400*I_2x2, Vector2(1.0, 1.0), 3.0);
+  gfg.emplace_shared<HessianFactor>(1, 2, 100 * I_2x2, Z_2x2, Vector2(0.0, 1.0),
+                                    400 * I_2x2, Vector2(1.0, 1.0), 3.0);
   return gfg;
 }
@@ -322,8 +323,7 @@ TEST(GaussianFactorGraph, multiplyHessianAdd2) {
   Y << -450, -450, 300, 400, 2950, 3450;
   EXPECT(assert_equal(Y, AtA * X));
 
-  VectorValues x = map_list_of(0, Vector2(1, 2))(1, Vector2(3, 4))(2, Vector2(5, 6));
-
+  const VectorValues x {{0, Vector2(1, 2)}, {1, Vector2(3, 4)}, {2, Vector2(5, 6)}};
   VectorValues expected;
   expected.insert(0, Vector2(-450, -450));
   expected.insert(1, Vector2(300, 400));
@@ -367,10 +367,10 @@ TEST(GaussianFactorGraph, gradientAtZero) {
 /* ************************************************************************* */
 TEST(GaussianFactorGraph, clone) {
   // 2 variables, frontal has dim=4
-  VerticalBlockMatrix blockMatrix(list_of(4)(2)(1), 4);
+  VerticalBlockMatrix blockMatrix(KeyVector{4, 2, 1}, 4);
   blockMatrix.matrix() << 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.1, 0.0, 1.0, 0.0, 2.0,
       0.0, 3.0, 0.2, 0.0, 0.0, 3.0, 0.0, 4.0, 0.0, 0.3, 0.0, 0.0, 0.0, 3.0, 0.0,
       4.0, 0.4;
-  GaussianConditional cg(list_of(1)(2), 1, blockMatrix);
+  GaussianConditional cg(KeyVector{1, 2}, 1, blockMatrix);
 
   GaussianFactorGraph init_graph = createGaussianFactorGraphWithHessianFactor();
   init_graph.push_back(GaussianFactor::shared_ptr());  /// Add null factor
diff --git a/gtsam/linear/tests/testHessianFactor.cpp b/gtsam/linear/tests/testHessianFactor.cpp
index 3e13ecf10..92ffe71ac 100644
--- a/gtsam/linear/tests/testHessianFactor.cpp
+++ b/gtsam/linear/tests/testHessianFactor.cpp
@@ -25,11 +25,6 @@
 #include <CppUnitLite/TestHarness.h>
 
-#include <boost/assign/std/list.hpp>
-#include <boost/assign/std/map.hpp>
-#include <boost/assign/list_of.hpp>
-using namespace boost::assign;
-
 #include <numeric>
 #include <vector>
 
@@ -38,6 +33,8 @@ using namespace gtsam;
 
 const double tol = 1e-5;
 
+using Dims = std::vector<size_t>;  // For constructing block matrices
+
 /* ************************************************************************* */
 TEST(HessianFactor, Slot)
 {
@@ -61,8 +58,8 @@ TEST(HessianFactor, emptyConstructor)
 /* ************************************************************************* */
 TEST(HessianFactor, ConversionConstructor)
 {
-  HessianFactor expected(list_of(0)(1),
-    SymmetricBlockMatrix(list_of(2)(4)(1), (Matrix(7,7) <<
+  HessianFactor expected(KeyVector{0, 1},
+    SymmetricBlockMatrix(Dims{2, 4, 1}, (Matrix(7,7) <<
     125.0000,       0.0,  -25.0000,       0.0, -100.0000,       0.0,   25.0000,
          0.0,  125.0000,       0.0,  -25.0000,       0.0, -100.0000,  -17.5000,
    -25.0000,       0.0,   25.0000,       0.0,       0.0,       0.0,   -5.0000,
 
   HessianFactor actual(jacobian);
 
-  VectorValues values = pair_list_of
-    (0, Vector2(1.0, 2.0))
-    (1, (Vector(4) << 3.0, 4.0, 5.0, 6.0).finished());
+  VectorValues values{{0, Vector2(1.0, 2.0)}, {1, Vector4(3.0, 4.0, 5.0, 6.0)}};
 
   EXPECT_LONGS_EQUAL(2, (long)actual.size());
   EXPECT(assert_equal(expected, actual, 1e-9));
@@ -108,7 +103,7 @@ TEST(HessianFactor, Constructor1)
   EXPECT(assert_equal(g, Vector(factor.linearTerm())));
   EXPECT_LONGS_EQUAL(1, (long)factor.size());
 
-  VectorValues dx = pair_list_of(0, Vector2(1.5, 2.5));
+  VectorValues dx{{0, Vector2(1.5, 2.5)}};
 
   // error 0.5*(f - 2*x'*g + x'*G*x)
   double expected = 80.375;
@@ -150,9 +145,7 @@ TEST(HessianFactor, Constructor2)
   Vector dx0 = (Vector(1) << 0.5).finished();
   Vector dx1 = Vector2(1.5, 2.5);
 
-  VectorValues dx = pair_list_of
-    (0, dx0)
-    (1, dx1);
+  VectorValues dx{{0, dx0}, {1, dx1}};
 
   HessianFactor factor(0, 1, G11, G12, g1, G22, g2, f);
 
@@ -171,10 +164,7 @@ TEST(HessianFactor, Constructor2)
   EXPECT(assert_equal(G22, factor.info().diagonalBlock(1)));
 
   // Check case when vector values is larger than factor
-  VectorValues dxLarge = pair_list_of
-    (0, dx0)
-    (1, dx1)
-    (2, Vector2(0.1, 0.2));
+  VectorValues dxLarge {{0, dx0}, {1, dx1}, {2, Vector2(0.1, 0.2)}};
   EXPECT_DOUBLES_EQUAL(expected, factor.error(dxLarge), 1e-10);
 }
@@ -200,11 +190,7 @@ TEST(HessianFactor, Constructor3)
   Vector dx1 = Vector2(1.5, 2.5);
   Vector dx2 = Vector3(1.5, 2.5, 3.5);
 
-  VectorValues dx = pair_list_of
-    (0, dx0)
-    (1, dx1)
-    (2, dx2);
-
+  VectorValues dx {{0, dx0}, {1, dx1}, {2, dx2}};
   HessianFactor factor(0, 1, 2, G11, G12, G13, g1, G22, G23, g2, G33, g3, f);
 
   double expected = 371.3750;
@@ -247,10 +233,7 @@ TEST(HessianFactor, ConstructorNWay)
   Vector dx1 = Vector2(1.5, 2.5);
   Vector dx2 = Vector3(1.5, 2.5, 3.5);
 
-  VectorValues dx = pair_list_of
-    (0, dx0)
-    (1, dx1)
-    (2, dx2);
+  VectorValues dx {{0, dx0}, {1, dx1}, {2, dx2}};
 
   KeyVector js {0, 1, 2};
   std::vector<Matrix> Gs;
@@ -309,7 +292,7 @@ TEST(HessianFactor, CombineAndEliminate1) {
                       hessian.augmentedInformation(), 1e-9));
 
   // perform elimination on jacobian
-  Ordering ordering = list_of(1);
+  Ordering ordering {1};
   GaussianConditional::shared_ptr expectedConditional;
   JacobianFactor::shared_ptr expectedFactor;
   boost::tie(expectedConditional, expectedFactor) = jacobian.eliminate(ordering);
@@ -372,7 +355,7 @@ TEST(HessianFactor, CombineAndEliminate2) {
                       hessian.augmentedInformation(), 1e-9));
 
   // perform elimination on jacobian
-  Ordering ordering = list_of(0);
+  Ordering ordering {0};
   GaussianConditional::shared_ptr expectedConditional;
   JacobianFactor::shared_ptr expectedFactor;
   boost::tie(expectedConditional, expectedFactor) =  //
@@ -437,10 +420,10 @@ TEST(HessianFactor, eliminate2 )
 
   // eliminate the combined factor
   HessianFactor::shared_ptr combinedLF_Chol(new HessianFactor(combined));
-  GaussianFactorGraph combinedLFG_Chol = list_of(combinedLF_Chol);
+  GaussianFactorGraph combinedLFG_Chol {combinedLF_Chol};
 
   std::pair<GaussianConditional::shared_ptr, HessianFactor::shared_ptr> actual_Chol =
-      EliminateCholesky(combinedLFG_Chol, Ordering(list_of(0)));
+      EliminateCholesky(combinedLFG_Chol, Ordering{0});
 
   // create expected Conditional Gaussian
   double oldSigma = 0.0894427;  // from when R was made unit
@@ -483,8 +466,8 @@ TEST(HessianFactor, combine) {
                   0.0, -8.94427191).finished();
   Vector b = Vector2(2.23606798,-1.56524758);
   SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector::Ones(2));
-  GaussianFactor::shared_ptr f(new JacobianFactor(0, A0, 1, A1, 2, A2, b, model));
-  GaussianFactorGraph factors = list_of(f);
+  GaussianFactorGraph factors{
+      boost::make_shared<JacobianFactor>(0, A0, 1, A1, 2, A2, b, model)};
 
   // Form Ab' * Ab
   HessianFactor actual(factors);
@@ -514,7 +497,7 @@ TEST(HessianFactor, gradientAtZero)
   HessianFactor factor(0, 1, G11, G12, g1, G22, g2, f);
 
   // test gradient at zero
-  VectorValues expectedG = pair_list_of(0, -g1) (1, -g2);
+  VectorValues expectedG{{0, -g1}, {1, -g2}};
   Matrix A; Vector b; boost::tie(A,b) = factor.jacobian();
   KeyVector keys {0, 1};
   EXPECT(assert_equal(-A.transpose()*b, expectedG.vector(keys)));
@@ -537,7 +520,7 @@ TEST(HessianFactor, gradient)
   // test gradient
   Vector x0 = (Vector(1) << 3.0).finished();
   Vector x1 = (Vector(2) << -3.5, 7.1).finished();
-  VectorValues x = pair_list_of(0, x0) (1, x1);
+  VectorValues x {{0, x0}, {1, x1}};
 
   Vector expectedGrad0 = (Vector(1) << 10.0).finished();
   Vector expectedGrad1 = (Vector(2) << 4.5, 16.1).finished();
diff --git a/gtsam/linear/tests/testJacobianFactor.cpp b/gtsam/linear/tests/testJacobianFactor.cpp
index 21146066d..e0f4d8a0b 100644
--- a/gtsam/linear/tests/testJacobianFactor.cpp
+++ b/gtsam/linear/tests/testJacobianFactor.cpp
@@ -25,26 +25,24 @@
 #include <gtsam/base/TestableAssertions.h>
 #include <gtsam/base/VerticalBlockMatrix.h>
-#include <boost/assign/list_of.hpp>
-#include <boost/assign/std/vector.hpp>
 #include <CppUnitLite/TestHarness.h>
 #include <boost/range/iterator_range.hpp>
 
 using namespace std;
 using namespace gtsam;
-using namespace boost::assign;
+
+using Dims = std::vector<size_t>;  // For constructing block matrices
 
 namespace {
   namespace simple {
   // Terms we'll use
-  const vector<pair<Key, Matrix> > terms = list_of<pair<Key,Matrix> >
-    (make_pair(5, Matrix3::Identity()))
-    (make_pair(10, 2*Matrix3::Identity()))
-    (make_pair(15, 3*Matrix3::Identity()));
+  const vector<pair<Key, Matrix> > terms{
+      {5, I_3x3}, {10, 2 * I_3x3}, {15, 3 * I_3x3}};
 
-  // RHS and sigmas
-  const Vector b = Vector3(1., 2., 3.);
-  const SharedDiagonal noise = noiseModel::Diagonal::Sigmas(Vector3(0.5, 0.5, 0.5));
+  // RHS and sigmas
+  const Vector b = Vector3(1., 2., 3.);
+  const SharedDiagonal noise =
+      noiseModel::Diagonal::Sigmas(Vector3(0.5, 0.5, 0.5));
   }
 }
@@ -128,7 +126,7 @@ TEST(JacobianFactor, constructors_and_accessors)
     // VerticalBlockMatrix constructor
     JacobianFactor expected(
         boost::make_iterator_range(terms.begin(), terms.begin() + 3), b, noise);
-    VerticalBlockMatrix blockMatrix(list_of(3)(3)(3)(1), 3);
+    VerticalBlockMatrix blockMatrix(Dims{3, 3, 3, 1}, 3);
     blockMatrix(0) = terms[0].second;
     blockMatrix(1) = terms[1].second;
     blockMatrix(2) = terms[2].second;
@@ -191,22 +189,25 @@ Key keyX(10), keyY(8), keyZ(12);
 double sigma1 = 0.1;
 Matrix A11 = I_2x2;
 Vector2 b1(2, -1);
-JacobianFactor factor1(keyX, A11, b1, noiseModel::Isotropic::Sigma(2, sigma1));
+auto factor1 = boost::make_shared<JacobianFactor>(
+    keyX, A11, b1, noiseModel::Isotropic::Sigma(2, sigma1));
 
 double sigma2 = 0.5;
 Matrix A21 = -2 * I_2x2;
 Matrix A22 = 3 * I_2x2;
 Vector2 b2(4, -5);
-JacobianFactor factor2(keyX, A21, keyY, A22, b2, noiseModel::Isotropic::Sigma(2, sigma2));
+auto factor2 = boost::make_shared<JacobianFactor>(
+    keyX, A21, keyY, A22, b2, noiseModel::Isotropic::Sigma(2, sigma2));
 
 double sigma3 = 1.0;
 Matrix A32 = -4 * I_2x2;
 Matrix A33 = 5 * I_2x2;
 Vector2 b3(3, -6);
-JacobianFactor factor3(keyY, A32, keyZ, A33, b3, noiseModel::Isotropic::Sigma(2, sigma3));
+auto factor3 = boost::make_shared<JacobianFactor>(
+    keyY, A32, keyZ, A33, b3, noiseModel::Isotropic::Sigma(2, sigma3));
 
-GaussianFactorGraph factors(list_of(factor1)(factor2)(factor3));
-Ordering ordering(list_of(keyX)(keyY)(keyZ));
+const GaussianFactorGraph factors { factor1, factor2, factor3 };
+const Ordering ordering { keyX, keyY, keyZ };
 }
 
 /* ************************************************************************* */
@@ -436,11 +437,11 @@ TEST(JacobianFactor, eliminate)
   Vector9 sigmas; sigmas << s1, s0, s2;
   JacobianFactor combinedFactor(0, A0, 1, A1, b, noiseModel::Diagonal::Sigmas(sigmas, true));
-  GaussianFactorGraph::EliminationResult expected = combinedFactor.eliminate(list_of(0));
+  GaussianFactorGraph::EliminationResult expected = combinedFactor.eliminate(Ordering{0});
   JacobianFactor::shared_ptr expectedJacobian = boost::dynamic_pointer_cast<
       JacobianFactor>(expected.second);
 
-  GaussianFactorGraph::EliminationResult actual = EliminateQR(gfg, list_of(0));
+  GaussianFactorGraph::EliminationResult actual = EliminateQR(gfg, Ordering{0});
   JacobianFactor::shared_ptr actualJacobian = boost::dynamic_pointer_cast<
       JacobianFactor>(actual.second);
@@ -487,7 +488,7 @@ TEST(JacobianFactor, eliminate2 )
 
   // eliminate the combined factor
   pair<GaussianConditional::shared_ptr, JacobianFactor::shared_ptr>
-    actual = combined.eliminate(Ordering(list_of(2)));
+    actual = combined.eliminate(Ordering{2});
 
   // create expected Conditional Gaussian
   double oldSigma = 0.0894427;  // from when R was made unit
@@ -539,11 +540,11 @@ TEST(JacobianFactor, EliminateQR)
   // Create factor graph
   const SharedDiagonal sig_4D = noiseModel::Isotropic::Sigma(4, 0.5);
   const SharedDiagonal sig_2D = noiseModel::Isotropic::Sigma(2, 0.5);
-  GaussianFactorGraph factors = list_of
-    (JacobianFactor(list_of(3)(5)(7)(9)(11), VerticalBlockMatrix(list_of(2)(2)(2)(2)(2)(1), Ab.block(0, 0, 4, 11)), sig_4D))
-    (JacobianFactor(list_of(5)(7)(9)(11), VerticalBlockMatrix(list_of(2)(2)(2)(2)(1), Ab.block(4, 2, 4, 9)), sig_4D))
-    (JacobianFactor(list_of(7)(9)(11), VerticalBlockMatrix(list_of(2)(2)(2)(1), Ab.block(8, 4, 4, 7)), sig_4D))
-    (JacobianFactor(list_of(11), VerticalBlockMatrix(list_of(2)(1), Ab.block(12, 8, 2, 3)), sig_2D));
+  GaussianFactorGraph factors = {
+      boost::make_shared<JacobianFactor>(KeyVector{3, 5, 7, 9, 11}, VerticalBlockMatrix(Dims{2, 2, 2, 2, 2, 1}, Ab.block(0, 0, 4, 11)), sig_4D),
+      boost::make_shared<JacobianFactor>(KeyVector{5, 7, 9, 11}, VerticalBlockMatrix(Dims{2, 2, 2, 2, 1}, Ab.block(4, 2, 4, 9)), sig_4D),
+      boost::make_shared<JacobianFactor>(KeyVector{7, 9, 11}, VerticalBlockMatrix(Dims{2, 2, 2, 1}, Ab.block(8, 4, 4, 7)), sig_4D),
+      boost::make_shared<JacobianFactor>(KeyVector{11}, VerticalBlockMatrix(Dims{2, 1}, Ab.block(12, 8, 2, 3)), sig_2D)};
 
   // extract the dense matrix for the graph
   Matrix actualDense = factors.augmentedJacobian();
@@ -562,12 +563,12 @@ TEST(JacobianFactor, EliminateQR)
     0., 0., 0., 0., 0., 0., 0., 0., 8.2503, 3.3757, 6.8476,
     0., 0., 0., 0., 0., 0., 0., 0., 0., -5.7095, -0.0090).finished();
 
-  GaussianConditional expectedFragment(list_of(3)(5)(7)(9)(11), 3,
-      VerticalBlockMatrix(list_of(2)(2)(2)(2)(2)(1), R.topRows(6)),
-      noiseModel::Unit::Create(6));
+  GaussianConditional expectedFragment(KeyVector{3,5,7,9,11}, 3,
+      VerticalBlockMatrix(Dims{2, 2, 2, 2, 2, 1}, R.topRows(6)),
+      noiseModel::Unit::Create(6));
 
   // Eliminate (3 frontal variables, 6 scalar columns) using QR !!!!
-  GaussianFactorGraph::EliminationResult actual = EliminateQR(factors, list_of(3)(5)(7));
+  GaussianFactorGraph::EliminationResult actual = EliminateQR(factors, Ordering{3, 5, 7});
 
   const JacobianFactor &actualJF = dynamic_cast<const JacobianFactor&>(*actual.second);
   EXPECT(assert_equal(expectedFragment, *actual.first, 0.001));
@@ -589,7 +590,7 @@ TEST ( JacobianFactor, constraint_eliminate1 )
 
   // eliminate it
   pair<GaussianConditional::shared_ptr, JacobianFactor::shared_ptr>
-    actual = lc.eliminate(list_of(1));
+    actual = lc.eliminate(Ordering{1});
 
   // verify linear factor is a null ptr
   EXPECT(actual.second->empty());
@@ -617,7 +618,7 @@ TEST ( JacobianFactor, constraint_eliminate2 )
 
   // eliminate x and verify results
   pair<GaussianConditional::shared_ptr, JacobianFactor::shared_ptr>
-    actual = lc.eliminate(list_of(1));
+    actual = lc.eliminate(Ordering{1});
 
   // LF should be empty
   // It's tricky to create Eigen matrices that are only zero along one dimension
@@ -648,7 +649,7 @@ TEST(JacobianFactor, OverdeterminedEliminate) {
   SharedDiagonal diagonal = noiseModel::Diagonal::Sigmas(sigmas);
   JacobianFactor factor(0, Ab.leftCols(3), Ab.col(3), diagonal);
 
-  GaussianFactorGraph::EliminationResult actual = factor.eliminate(list_of(0));
+  GaussianFactorGraph::EliminationResult actual = factor.eliminate(Ordering{0});
 
   Matrix expectedRd(3, 4);
   expectedRd << -2.64575131, -2.64575131, -2.64575131, -2.64575131,  //
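Editorial note (not part of the patch): block matrices are now dimensioned with a plain Dims vector instead of list_of(..)(..)  chains. A sketch mirroring the solve_multifrontal fixture in testGaussianConditional.cpp:

// Sketch, not the library's own example code.
#include <gtsam/base/VerticalBlockMatrix.h>
#include <gtsam/linear/GaussianConditional.h>

#include <vector>

using namespace gtsam;
using Dims = std::vector<size_t>;

int main() {
  // Three 2-wide variable blocks plus a final RHS column, 4 rows:
  VerticalBlockMatrix blockMatrix(Dims{2, 2, 2, 1}, 4);
  blockMatrix.matrix() <<
      1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.1,
      0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.2,
      0.0, 0.0, 3.0, 0.0, 4.0, 0.0, 0.3,
      0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.4;
  // Two frontal variables (keys 1, 2), one parent (key 10):
  GaussianConditional cg(KeyVector{1, 2, 10}, 2, blockMatrix);
  cg.print("cg");
  return 0;
}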
diff --git a/gtsam/linear/tests/testNoiseModel.cpp b/gtsam/linear/tests/testNoiseModel.cpp
index f83ba7831..442923662 100644
--- a/gtsam/linear/tests/testNoiseModel.cpp
+++ b/gtsam/linear/tests/testNoiseModel.cpp
@@ -22,15 +22,12 @@
 
 #include <CppUnitLite/TestHarness.h>
 
-#include <boost/assign/std/vector.hpp>
-
 #include <iostream>
 #include <limits>
 
 using namespace std;
 using namespace gtsam;
 using namespace noiseModel;
-using namespace boost::assign;
 
 static const double kSigma = 2, kInverseSigma = 1.0 / kSigma,
                     kVariance = kSigma * kSigma, prc = 1.0 / kVariance;
diff --git a/gtsam/linear/tests/testRegularHessianFactor.cpp b/gtsam/linear/tests/testRegularHessianFactor.cpp
index 3441c6cc2..2e4d2249e 100644
--- a/gtsam/linear/tests/testRegularHessianFactor.cpp
+++ b/gtsam/linear/tests/testRegularHessianFactor.cpp
@@ -21,13 +21,8 @@
 
 #include <CppUnitLite/TestHarness.h>
 
-#include <boost/assign/std/map.hpp>
-#include <boost/assign/std/vector.hpp>
-#include <boost/assign/list_of.hpp>
-
 using namespace std;
 using namespace gtsam;
-using namespace boost::assign;
 
 /* ************************************************************************* */
 TEST(RegularHessianFactor, Constructors)
@@ -36,8 +31,7 @@ TEST(RegularHessianFactor, Constructors)
   // 0.5*|x0 + x1 + x3 - [1;2]|^2 = 0.5*|A*x-b|^2, with A=[I I I]
   Matrix A1 = I_2x2, A2 = I_2x2, A3 = I_2x2;
   Vector2 b(1,2);
-  vector<pair<Key, Matrix> > terms;
-  terms += make_pair(0, A1), make_pair(1, A2), make_pair(3, A3);
+  const vector<pair<Key, Matrix> > terms {{0, A1}, {1, A2}, {3, A3}};
   RegularJacobianFactor<2> jf(terms, b);
 
   // Test conversion from JacobianFactor
@@ -65,8 +59,8 @@ TEST(RegularHessianFactor, Constructors)
 
   // Test n-way constructor
   KeyVector keys {0, 1, 3};
-  vector<Matrix> Gs; Gs += G11, G12, G13, G22, G23, G33;
-  vector<Vector> gs; gs += g1, g2, g3;
+  vector<Matrix> Gs {G11, G12, G13, G22, G23, G33};
+  vector<Vector> gs {g1, g2, g3};
   RegularHessianFactor<2> factor3(keys, Gs, gs, f);
   EXPECT(assert_equal(factor, factor3));
@@ -82,7 +76,7 @@ TEST(RegularHessianFactor, Constructors)
 
   // Test constructor from Information matrix
   Matrix info = factor.augmentedInformation();
-  vector<size_t> dims; dims += 2, 2, 2;
+  vector<size_t> dims {2, 2, 2};
   SymmetricBlockMatrix sym(dims, info, true);
   RegularHessianFactor<2> factor6(keys, sym);
   EXPECT(assert_equal(factor, factor6));
@@ -97,10 +91,7 @@ TEST(RegularHessianFactor, Constructors)
   Vector Y(6); Y << 9, 12, 9, 12, 9, 12;
   EXPECT(assert_equal(Y,AtA*X));
 
-  VectorValues x = map_list_of
-    (0, Vector2(1,2))
-    (1, Vector2(3,4))
-    (3, Vector2(5,6));
+  VectorValues x{{0, Vector2(1, 2)}, {1, Vector2(3, 4)}, {3, Vector2(5, 6)}};
 
   VectorValues expected;
   expected.insert(0, Y.segment<2>(0));
diff --git a/gtsam/linear/tests/testRegularJacobianFactor.cpp b/gtsam/linear/tests/testRegularJacobianFactor.cpp
index b8c4aa689..205d9d092 100644
--- a/gtsam/linear/tests/testRegularJacobianFactor.cpp
+++ b/gtsam/linear/tests/testRegularJacobianFactor.cpp
@@ -24,14 +24,11 @@
 
 #include <CppUnitLite/TestHarness.h>
 
-#include <boost/assign/std/list.hpp>
-#include <boost/assign/list_of.hpp>
 #include <iostream>
 #include <vector>
 
 using namespace std;
 using namespace gtsam;
-using namespace boost::assign;
 
 static const size_t fixedDim = 3;
 static const size_t nrKeys = 3;
@@ -40,10 +37,8 @@ static const size_t nrKeys = 3;
 namespace {
   namespace simple {
   // Terms we'll use
-  const vector<pair<Key, Matrix> > terms = list_of<pair<Key,Matrix> >
-    (make_pair(0, Matrix3::Identity()))
-    (make_pair(1, 2*Matrix3::Identity()))
-    (make_pair(2, 3*Matrix3::Identity()));
+  const vector<pair<Key, Matrix> > terms{
+      {0, 1 * I_3x3}, {1, 2 * I_3x3}, {2, 3 * I_3x3}};
 
   // RHS and sigmas
   const Vector b = (Vector(3) << 1., 2., 3.).finished();
@@ -52,10 +47,8 @@ namespace {
 
   namespace simple2 {
   // Terms
-  const vector<pair<Key, Matrix> > terms2 = list_of<pair<Key,Matrix> >
-    (make_pair(0, 2*Matrix3::Identity()))
-    (make_pair(1, 4*Matrix3::Identity()))
-    (make_pair(2, 6*Matrix3::Identity()));
+  const vector<pair<Key, Matrix> > terms2{
+      {0, 2 * I_3x3}, {1, 4 * I_3x3}, {2, 6 * I_3x3}};
 
   // RHS
   const Vector b2 = (Vector(3) << 2., 4., 6.).finished();
diff --git a/gtsam/linear/tests/testSerializationLinear.cpp b/gtsam/linear/tests/testSerializationLinear.cpp
index ee21de364..da093db97 100644
--- a/gtsam/linear/tests/testSerializationLinear.cpp
+++ b/gtsam/linear/tests/testSerializationLinear.cpp
@@ -22,9 +22,6 @@
 #include <gtsam/base/serializationTestHelpers.h>
 #include <CppUnitLite/TestHarness.h>
 
-#include <boost/assign/std/vector.hpp>
-using namespace boost::assign;
-
 #include <fstream>
 #include <limits>
 
@@ -228,13 +225,13 @@ TEST(Serialization, gaussian_bayes_net) {
 /* ************************************************************************* */
 TEST (Serialization, gaussian_bayes_tree) {
   const Key x1=1, x2=2, x3=3, x4=4;
-  const Ordering chainOrdering = Ordering(list_of(x2)(x1)(x3)(x4));
+  const Ordering chainOrdering {x2, x1, x3, x4};
   const SharedDiagonal chainNoise = noiseModel::Isotropic::Sigma(1, 0.5);
-  const GaussianFactorGraph chain = list_of
-    (JacobianFactor(x2, (Matrix(1, 1) << 1.).finished(), x1, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise))
-    (JacobianFactor(x2, (Matrix(1, 1) << 1.).finished(), x3, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise))
-    (JacobianFactor(x3, (Matrix(1, 1) << 1.).finished(), x4, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise))
-    (JacobianFactor(x4, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise));
+  const GaussianFactorGraph chain = {
+      boost::make_shared<JacobianFactor>(x2, (Matrix(1, 1) << 1.).finished(), x1, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise),
+      boost::make_shared<JacobianFactor>(x2, (Matrix(1, 1) << 1.).finished(), x3, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise),
+      boost::make_shared<JacobianFactor>(x3, (Matrix(1, 1) << 1.).finished(), x4, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise),
+      boost::make_shared<JacobianFactor>(x4, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise)};
 
   GaussianBayesTree init = *chain.eliminateMultifrontal(chainOrdering);
   GaussianBayesTree expected = *chain.eliminateMultifrontal(chainOrdering);
diff --git a/gtsam/linear/tests/testSparseEigen.cpp b/gtsam/linear/tests/testSparseEigen.cpp
index 225e1dab2..39c4f37e7 100644
--- a/gtsam/linear/tests/testSparseEigen.cpp
+++ b/gtsam/linear/tests/testSparseEigen.cpp
@@ -21,9 +21,6 @@
 #include <gtsam/linear/GaussianFactorGraph.h>
 #include <gtsam/linear/VectorValues.h>
 
-#include <boost/assign/list_of.hpp>
-using boost::assign::list_of;
-
 #include <CppUnitLite/TestHarness.h>
 #include <gtsam/base/TestableAssertions.h>
 
@@ -49,7 +46,7 @@ TEST(SparseEigen, sparseJacobianEigen) {
   EXPECT(assert_equal(gfg.augmentedJacobian(), Matrix(sparseResult)));
 
   // Call sparseJacobian with optional ordering...
-  auto ordering = Ordering(list_of(x45)(x123));
+  const Ordering ordering{x45, x123};
 
   // Eigen Sparse with optional ordering
   EXPECT(assert_equal(gfg.augmentedJacobian(ordering),
diff --git a/gtsam/linear/tests/testVectorValues.cpp b/gtsam/linear/tests/testVectorValues.cpp
index 5974f8320..49fc56d19 100644
--- a/gtsam/linear/tests/testVectorValues.cpp
+++ b/gtsam/linear/tests/testVectorValues.cpp
@@ -21,14 +21,11 @@
 
 #include <CppUnitLite/TestHarness.h>
 
-#include <boost/assign/std/vector.hpp>
-#include <boost/assign/std/map.hpp>
 #include <boost/range/adaptor/map.hpp>
 #include <vector>
 
 using namespace std;
-using namespace boost::assign;
 using boost::adaptors::map_keys;
 using namespace gtsam;
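Editorial summary (not part of the patch) of the boost::assign idioms removed above and their replacements, as they appear throughout this change:

  list_of(a)(b)                 ->  {a, b}  (or {boost::make_shared<T>(...), ...})
  map_list_of(k1, v1)(k2, v2)   ->  {{k1, v1}, {k2, v2}}
  pair_list_of(k1, v1)(k2, v2)  ->  Pairs{{k1, v1}, {k2, v2}}
  container += a, b, c;         ->  container = {a, b, c};  or emplace_shared<T>(...)
  Ordering(list_of(k1)(k2))     ->  Ordering{k1, k2}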