Merge pull request #1375 from borglab/feature/Ordering_initializers
commit f7c6f2b3f8
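This merge replaces boost::assign idioms (operator+= and list_of) with C++11 brace initialization throughout the files below. A minimal before/after sketch of the pattern being applied (illustrative only, not a file in this diff):

    #include <vector>

    std::vector<int> makeKeys() {
      // old style, removed by this PR (boost::assign):  keys += 1, 2, 3;
      // new style, added by this PR:
      std::vector<int> keys{1, 2, 3};
      return keys;
    }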
@@ -18,9 +18,6 @@
 #include <gtsam/geometry/CalibratedCamera.h>
 #include <gtsam/slam/dataset.h>

-#include <boost/assign/std/vector.hpp>
-
-using namespace boost::assign;
 using namespace std;
 using namespace gtsam;

@@ -18,13 +18,11 @@
 #include <gtsam/global_includes.h>
 #include <gtsam/base/Matrix.h>

-#include <boost/assign/list_of.hpp>
 #include <map>
 #include <iostream>

 using namespace std;
 using namespace gtsam;
-using boost::assign::list_of;

 #ifdef GTSAM_USE_TBB

@@ -56,6 +56,9 @@ public:
   /** Copy constructor from the base list class */
   FastList(const Base& x) : Base(x) {}

+  /// Construct from c++11 initializer list:
+  FastList(std::initializer_list<VALUE> l) : Base(l) {}
+
 #ifdef GTSAM_ALLOCATOR_BOOSTPOOL
   /** Copy constructor from a standard STL container */
   FastList(const std::list<VALUE>& x) {
@@ -56,15 +56,9 @@ public:
   typedef std::set<VALUE, std::less<VALUE>,
       typename internal::FastDefaultAllocator<VALUE>::type> Base;

-  /** Default constructor */
-  FastSet() {
-  }
+  using Base::Base;  // Inherit the set constructors

-  /** Constructor from a range, passes through to base class */
-  template<typename INPUTITERATOR>
-  explicit FastSet(INPUTITERATOR first, INPUTITERATOR last) :
-      Base(first, last) {
-  }
+  FastSet() = default;  ///< Default constructor

   /** Constructor from a iterable container, passes through to base class */
   template<typename INPUTCONTAINER>
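With the initializer_list constructor in FastList and the inherited std::set constructors in FastSet, callers can brace-construct these containers directly. A minimal usage sketch, assuming only the headers touched above:

    #include <gtsam/base/FastList.h>
    #include <gtsam/base/FastSet.h>
    #include <gtsam/inference/Key.h>

    void fastContainerSketch() {
      gtsam::FastList<gtsam::Key> list{1, 2, 3};  // uses FastList(std::initializer_list)
      gtsam::KeySet keys{2, 3, 4, 5};             // KeySet = FastSet<Key>; inherited set constructors
    }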
@@ -16,17 +16,14 @@
  * @brief unit tests for DSFMap
  */

-#include <CppUnitLite/TestHarness.h>
 #include <gtsam/base/DSFMap.h>

-#include <boost/assign/std/list.hpp>
-#include <boost/assign/std/set.hpp>
-using namespace boost::assign;
+#include <CppUnitLite/TestHarness.h>

 #include <iostream>
+#include <list>
+#include <map>
+#include <set>

 using namespace std;
 using namespace gtsam;

 /* ************************************************************************* */
@@ -65,9 +62,8 @@ TEST(DSFMap, merge3) {
 TEST(DSFMap, mergePairwiseMatches) {

   // Create some "matches"
-  typedef pair<size_t,size_t> Match;
-  list<Match> matches;
-  matches += Match(1,2), Match(2,3), Match(4,5), Match(4,6);
+  typedef std::pair<size_t, size_t> Match;
+  const std::list<Match> matches{{1, 2}, {2, 3}, {4, 5}, {4, 6}};

   // Merge matches
   DSFMap<size_t> dsf;
@@ -86,18 +82,17 @@ TEST(DSFMap, mergePairwiseMatches) {
 TEST(DSFMap, mergePairwiseMatches2) {

   // Create some measurements with image index and feature index
-  typedef pair<size_t,size_t> Measurement;
+  typedef std::pair<size_t,size_t> Measurement;
   Measurement m11(1,1),m12(1,2),m14(1,4); // in image 1
   Measurement m22(2,2),m23(2,3),m25(2,5),m26(2,6); // in image 2

   // Add them all
-  list<Measurement> measurements;
-  measurements += m11,m12,m14, m22,m23,m25,m26;
+  const std::list<Measurement> measurements{m11, m12, m14, m22, m23, m25, m26};

   // Create some "matches"
-  typedef pair<Measurement,Measurement> Match;
-  list<Match> matches;
-  matches += Match(m11,m22), Match(m12,m23), Match(m14,m25), Match(m14,m26);
+  typedef std::pair<Measurement, Measurement> Match;
+  const std::list<Match> matches{
+      {m11, m22}, {m12, m23}, {m14, m25}, {m14, m26}};

   // Merge matches
   DSFMap<Measurement> dsf;
@@ -114,26 +109,16 @@ TEST(DSFMap, mergePairwiseMatches2) {
 /* ************************************************************************* */
 TEST(DSFMap, sets){
   // Create some "matches"
-  typedef pair<size_t,size_t> Match;
-  list<Match> matches;
-  matches += Match(1,2), Match(2,3), Match(4,5), Match(4,6);
+  using Match = std::pair<size_t,size_t>;
+  const std::list<Match> matches{{1, 2}, {2, 3}, {4, 5}, {4, 6}};

   // Merge matches
   DSFMap<size_t> dsf;
   for(const Match& m: matches)
     dsf.merge(m.first,m.second);

-  map<size_t, set<size_t> > sets = dsf.sets();
-  set<size_t> s1, s2;
-  s1 += 1,2,3;
-  s2 += 4,5,6;
-
-  /*for(key_pair st: sets){
-    cout << "Set " << st.first << " :{";
-    for(const size_t s: st.second)
-      cout << s << ", ";
-    cout << "}" << endl;
-  }*/
+  std::map<size_t, std::set<size_t> > sets = dsf.sets();
+  const std::set<size_t> s1{1, 2, 3}, s2{4, 5, 6};

   EXPECT(s1 == sets[1]);
   EXPECT(s2 == sets[4]);
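The updated DSFMap tests above boil down to the following usage pattern; a condensed sketch that mirrors the test code (no new API is assumed):

    #include <gtsam/base/DSFMap.h>
    #include <list>
    #include <utility>

    void dsfMapSketch() {
      using Match = std::pair<size_t, size_t>;
      const std::list<Match> matches{{1, 2}, {2, 3}, {4, 5}, {4, 6}};
      gtsam::DSFMap<size_t> dsf;
      for (const Match& m : matches) dsf.merge(m.first, m.second);
      const auto sets = dsf.sets();  // expect two components: {1,2,3} and {4,5,6}
    }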
@@ -21,14 +21,15 @@
 #include <CppUnitLite/TestHarness.h>

 #include <boost/make_shared.hpp>
-#include <boost/assign/std/list.hpp>
-#include <boost/assign/std/set.hpp>
-#include <boost/assign/std/vector.hpp>
-using namespace boost::assign;

 #include <iostream>
+#include <set>
+#include <list>
+#include <utility>

-using namespace std;
+using std::pair;
+using std::map;
+using std::vector;
 using namespace gtsam;

 /* ************************************************************************* */
@@ -64,8 +65,8 @@ TEST(DSFBase, mergePairwiseMatches) {

   // Create some "matches"
   typedef pair<size_t,size_t> Match;
-  vector<Match> matches;
-  matches += Match(1,2), Match(2,3), Match(4,5), Match(4,6);
+  const vector<Match> matches{Match(1, 2), Match(2, 3), Match(4, 5),
+                              Match(4, 6)};

   // Merge matches
   DSFBase dsf(7); // We allow for keys 0..6
@@ -85,7 +86,7 @@ TEST(DSFBase, mergePairwiseMatches) {
 /* ************************************************************************* */
 TEST(DSFVector, merge2) {
   boost::shared_ptr<DSFBase::V> v = boost::make_shared<DSFBase::V>(5);
-  std::vector<size_t> keys; keys += 1, 3;
+  const std::vector<size_t> keys {1, 3};
   DSFVector dsf(v, keys);
   dsf.merge(1,3);
   EXPECT(dsf.find(1) == dsf.find(3));
@@ -95,10 +96,10 @@ TEST(DSFVector, merge2) {
 TEST(DSFVector, sets) {
   DSFVector dsf(2);
   dsf.merge(0,1);
-  map<size_t, set<size_t> > sets = dsf.sets();
+  map<size_t, std::set<size_t> > sets = dsf.sets();
   LONGS_EQUAL(1, sets.size());

-  set<size_t> expected; expected += 0, 1;
+  const std::set<size_t> expected{0, 1};
   EXPECT(expected == sets[dsf.find(0)]);
 }

@@ -109,7 +110,7 @@ TEST(DSFVector, arrays) {
   map<size_t, vector<size_t> > arrays = dsf.arrays();
   LONGS_EQUAL(1, arrays.size());

-  vector<size_t> expected; expected += 0, 1;
+  const vector<size_t> expected{0, 1};
   EXPECT(expected == arrays[dsf.find(0)]);
 }

@@ -118,10 +119,10 @@ TEST(DSFVector, sets2) {
   DSFVector dsf(3);
   dsf.merge(0,1);
   dsf.merge(1,2);
-  map<size_t, set<size_t> > sets = dsf.sets();
+  map<size_t, std::set<size_t> > sets = dsf.sets();
   LONGS_EQUAL(1, sets.size());

-  set<size_t> expected; expected += 0, 1, 2;
+  const std::set<size_t> expected{0, 1, 2};
   EXPECT(expected == sets[dsf.find(0)]);
 }

@@ -133,7 +134,7 @@ TEST(DSFVector, arrays2) {
   map<size_t, vector<size_t> > arrays = dsf.arrays();
   LONGS_EQUAL(1, arrays.size());

-  vector<size_t> expected; expected += 0, 1, 2;
+  const vector<size_t> expected{0, 1, 2};
   EXPECT(expected == arrays[dsf.find(0)]);
 }

@@ -141,10 +142,10 @@ TEST(DSFVector, arrays2) {
 TEST(DSFVector, sets3) {
   DSFVector dsf(3);
   dsf.merge(0,1);
-  map<size_t, set<size_t> > sets = dsf.sets();
+  map<size_t, std::set<size_t> > sets = dsf.sets();
   LONGS_EQUAL(2, sets.size());

-  set<size_t> expected; expected += 0, 1;
+  const std::set<size_t> expected{0, 1};
   EXPECT(expected == sets[dsf.find(0)]);
 }

@@ -155,7 +156,7 @@ TEST(DSFVector, arrays3) {
   map<size_t, vector<size_t> > arrays = dsf.arrays();
   LONGS_EQUAL(2, arrays.size());

-  vector<size_t> expected; expected += 0, 1;
+  const vector<size_t> expected{0, 1};
   EXPECT(expected == arrays[dsf.find(0)]);
 }

@@ -163,10 +164,10 @@ TEST(DSFVector, arrays3) {
 TEST(DSFVector, set) {
   DSFVector dsf(3);
   dsf.merge(0,1);
-  set<size_t> set = dsf.set(0);
+  std::set<size_t> set = dsf.set(0);
   LONGS_EQUAL(2, set.size());

-  std::set<size_t> expected; expected += 0, 1;
+  const std::set<size_t> expected{0, 1};
   EXPECT(expected == set);
 }

@@ -175,10 +176,10 @@ TEST(DSFVector, set2) {
   DSFVector dsf(3);
   dsf.merge(0,1);
   dsf.merge(1,2);
-  set<size_t> set = dsf.set(0);
+  std::set<size_t> set = dsf.set(0);
   LONGS_EQUAL(3, set.size());

-  std::set<size_t> expected; expected += 0, 1, 2;
+  const std::set<size_t> expected{0, 1, 2};
   EXPECT(expected == set);
 }

@@ -195,13 +196,12 @@ TEST(DSFVector, isSingleton) {
 TEST(DSFVector, mergePairwiseMatches) {

   // Create some measurements
-  vector<size_t> keys;
-  keys += 1,2,3,4,5,6;
+  const vector<size_t> keys{1, 2, 3, 4, 5, 6};

   // Create some "matches"
   typedef pair<size_t,size_t> Match;
-  vector<Match> matches;
-  matches += Match(1,2), Match(2,3), Match(4,5), Match(4,6);
+  const vector<Match> matches{Match(1, 2), Match(2, 3), Match(4, 5),
+                              Match(4, 6)};

   // Merge matches
   DSFVector dsf(keys);
@@ -209,13 +209,13 @@ TEST(DSFVector, mergePairwiseMatches) {
     dsf.merge(m.first,m.second);

   // Check that we have two connected components, 1,2,3 and 4,5,6
-  map<size_t, set<size_t> > sets = dsf.sets();
+  map<size_t, std::set<size_t> > sets = dsf.sets();
   LONGS_EQUAL(2, sets.size());
-  set<size_t> expected1; expected1 += 1,2,3;
-  set<size_t> actual1 = sets[dsf.find(2)];
+  const std::set<size_t> expected1{1, 2, 3};
+  std::set<size_t> actual1 = sets[dsf.find(2)];
   EXPECT(expected1 == actual1);
-  set<size_t> expected2; expected2 += 4,5,6;
-  set<size_t> actual2 = sets[dsf.find(5)];
+  const std::set<size_t> expected2{4, 5, 6};
+  std::set<size_t> actual2 = sets[dsf.find(5)];
   EXPECT(expected2 == actual2);
 }

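The DSFVector tests follow the same shape; a condensed sketch based on the test code above:

    #include <gtsam/base/DSFVector.h>
    #include <utility>
    #include <vector>

    void dsfVectorSketch() {
      typedef std::pair<size_t, size_t> Match;
      const std::vector<Match> matches{{1, 2}, {2, 3}, {4, 5}, {4, 6}};
      gtsam::DSFVector dsf(7);  // allows keys 0..6
      for (const Match& m : matches) dsf.merge(m.first, m.second);
      // the components returned by dsf.sets() now include {1,2,3} and {4,5,6}
    }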
@@ -11,12 +11,8 @@
 #include <gtsam/base/FastSet.h>
 #include <gtsam/base/FastVector.h>

-#include <boost/assign/std/vector.hpp>
-#include <boost/assign/std/set.hpp>
-
 #include <CppUnitLite/TestHarness.h>

-using namespace boost::assign;
 using namespace gtsam;

 /* ************************************************************************* */
@@ -25,7 +21,7 @@ TEST( testFastContainers, KeySet ) {
   KeyVector init_vector {2, 3, 4, 5};

   KeySet actSet(init_vector);
-  KeySet expSet; expSet += 2, 3, 4, 5;
+  KeySet expSet{2, 3, 4, 5};
   EXPECT(actSet == expSet);
 }

@@ -17,14 +17,12 @@

 #include <CppUnitLite/TestHarness.h>
 #include <gtsam/base/SymmetricBlockMatrix.h>
-#include <boost/assign/list_of.hpp>

 using namespace std;
 using namespace gtsam;
-using boost::assign::list_of;

 static SymmetricBlockMatrix testBlockMatrix(
-  list_of(3)(2)(1),
+  std::vector<size_t>{3, 2, 1},
   (Matrix(6, 6) <<
    1, 2, 3, 4, 5, 6,
    2, 8, 9, 10, 11, 12,
@@ -101,7 +99,8 @@ TEST(SymmetricBlockMatrix, Ranges)
 /* ************************************************************************* */
 TEST(SymmetricBlockMatrix, expressions)
 {
-  SymmetricBlockMatrix expected1(list_of(2)(3)(1), (Matrix(6, 6) <<
+  const std::vector<size_t> dimensions{2, 3, 1};
+  SymmetricBlockMatrix expected1(dimensions, (Matrix(6, 6) <<
       0, 0, 0, 0, 0, 0,
       0, 0, 0, 0, 0, 0,
       0, 0, 4, 6, 8, 0,
@@ -109,7 +108,7 @@ TEST(SymmetricBlockMatrix, expressions)
       0, 0, 0, 0, 16, 0,
       0, 0, 0, 0, 0, 0).finished());

-  SymmetricBlockMatrix expected2(list_of(2)(3)(1), (Matrix(6, 6) <<
+  SymmetricBlockMatrix expected2(dimensions, (Matrix(6, 6) <<
       0, 0, 10, 15, 20, 0,
       0, 0, 12, 18, 24, 0,
       0, 0, 0, 0, 0, 0,
@@ -120,32 +119,32 @@ TEST(SymmetricBlockMatrix, expressions)
   Matrix a = (Matrix(1, 3) << 2, 3, 4).finished();
   Matrix b = (Matrix(1, 2) << 5, 6).finished();

-  SymmetricBlockMatrix bm1(list_of(2)(3)(1));
+  SymmetricBlockMatrix bm1(dimensions);
   bm1.setZero();
   bm1.diagonalBlock(1).rankUpdate(a.transpose());
   EXPECT(assert_equal(Matrix(expected1.selfadjointView()), bm1.selfadjointView()));

-  SymmetricBlockMatrix bm2(list_of(2)(3)(1));
+  SymmetricBlockMatrix bm2(dimensions);
   bm2.setZero();
   bm2.updateOffDiagonalBlock(0, 1, b.transpose() * a);
   EXPECT(assert_equal(Matrix(expected2.selfadjointView()), bm2.selfadjointView()));

-  SymmetricBlockMatrix bm3(list_of(2)(3)(1));
+  SymmetricBlockMatrix bm3(dimensions);
   bm3.setZero();
   bm3.updateOffDiagonalBlock(1, 0, a.transpose() * b);
   EXPECT(assert_equal(Matrix(expected2.selfadjointView()), bm3.selfadjointView()));

-  SymmetricBlockMatrix bm4(list_of(2)(3)(1));
+  SymmetricBlockMatrix bm4(dimensions);
   bm4.setZero();
   bm4.updateDiagonalBlock(1, expected1.diagonalBlock(1));
   EXPECT(assert_equal(Matrix(expected1.selfadjointView()), bm4.selfadjointView()));

-  SymmetricBlockMatrix bm5(list_of(2)(3)(1));
+  SymmetricBlockMatrix bm5(dimensions);
   bm5.setZero();
   bm5.updateOffDiagonalBlock(0, 1, expected2.aboveDiagonalBlock(0, 1));
   EXPECT(assert_equal(Matrix(expected2.selfadjointView()), bm5.selfadjointView()));

-  SymmetricBlockMatrix bm6(list_of(2)(3)(1));
+  SymmetricBlockMatrix bm6(dimensions);
   bm6.setZero();
   bm6.updateOffDiagonalBlock(1, 0, expected2.aboveDiagonalBlock(0, 1).transpose());
   EXPECT(assert_equal(Matrix(expected2.selfadjointView()), bm6.selfadjointView()));
@@ -162,7 +161,8 @@ TEST(SymmetricBlockMatrix, inverseInPlace) {
   inputMatrix += c * c.transpose();
   const Matrix expectedInverse = inputMatrix.inverse();

-  SymmetricBlockMatrix symmMatrix(list_of(2)(1), inputMatrix);
+  const std::vector<size_t> dimensions{2, 1};
+  SymmetricBlockMatrix symmMatrix(dimensions, inputMatrix);
   // invert in place
   symmMatrix.invertInPlace();
   EXPECT(assert_equal(expectedInverse, symmMatrix.selfadjointView()));
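Block dimensions are now passed as a std::vector<size_t> instead of boost::assign::list_of; a minimal sketch of the constructor call used throughout the test above:

    #include <gtsam/base/SymmetricBlockMatrix.h>
    #include <vector>

    void blockMatrixSketch() {
      const std::vector<size_t> dimensions{2, 3, 1};  // three blocks, 6x6 total
      gtsam::SymmetricBlockMatrix bm(dimensions);
      bm.setZero();
    }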
@@ -23,16 +23,13 @@
 #include <list>
 #include <boost/shared_ptr.hpp>
 #include <boost/make_shared.hpp>
-#include <boost/assign/std/list.hpp>

-using boost::assign::operator+=;
-using namespace std;
 using namespace gtsam;

 struct TestNode {
   typedef boost::shared_ptr<TestNode> shared_ptr;
   int data;
-  vector<shared_ptr> children;
+  std::vector<shared_ptr> children;
   TestNode() : data(-1) {}
   TestNode(int data) : data(data) {}
 };
@@ -110,10 +107,8 @@ TEST(treeTraversal, DepthFirst)
   TestForest testForest = makeTestForest();

   // Expected visit order
-  std::list<int> preOrderExpected;
-  preOrderExpected += 0, 2, 3, 4, 1;
-  std::list<int> postOrderExpected;
-  postOrderExpected += 2, 4, 3, 0, 1;
+  const std::list<int> preOrderExpected{0, 2, 3, 4, 1};
+  const std::list<int> postOrderExpected{2, 4, 3, 0, 1};

   // Actual visit order
   PreOrderVisitor preVisitor;
@@ -135,8 +130,7 @@ TEST(treeTraversal, CloneForest)
   testForest2.roots_ = treeTraversal::CloneForest(testForest1);

   // Check that the original and clone both are expected
-  std::list<int> preOrder1Expected;
-  preOrder1Expected += 0, 2, 3, 4, 1;
+  const std::list<int> preOrder1Expected{0, 2, 3, 4, 1};
   std::list<int> preOrder1Actual = getPreorder(testForest1);
   std::list<int> preOrder2Actual = getPreorder(testForest2);
   EXPECT(assert_container_equality(preOrder1Expected, preOrder1Actual));
@@ -144,8 +138,7 @@ TEST(treeTraversal, CloneForest)

   // Modify clone - should not modify original
   testForest2.roots_[0]->children[1]->data = 10;
-  std::list<int> preOrderModifiedExpected;
-  preOrderModifiedExpected += 0, 2, 10, 4, 1;
+  const std::list<int> preOrderModifiedExpected{0, 2, 10, 4, 1};

   // Check that original is the same and only the clone is modified
   std::list<int> preOrder1ModActual = getPreorder(testForest1);
@@ -18,14 +18,13 @@

 #include <CppUnitLite/TestHarness.h>
 #include <gtsam/base/VerticalBlockMatrix.h>
-#include <boost/assign/list_of.hpp>

-using namespace std;
+#include<list>
+#include<vector>
+
 using namespace gtsam;
-using boost::assign::list_of;

-list<size_t> L = list_of(3)(2)(1);
-vector<size_t> dimensions(L.begin(),L.end());
+const std::vector<size_t> dimensions{3, 2, 1};

 //*****************************************************************************
 TEST(VerticalBlockMatrix, Constructor1) {
@@ -22,14 +22,10 @@
 #include <gtsam/discrete/DecisionTree.h>

 #include <algorithm>
-#include <boost/assign/std/vector.hpp>
 #include <boost/format.hpp>
 #include <boost/make_shared.hpp>
 #include <boost/noncopyable.hpp>
 #include <boost/optional.hpp>
 #include <boost/tuple/tuple.hpp>
 #include <boost/type_traits/has_dereference.hpp>
 #include <boost/unordered_set.hpp>

 #include <cmath>
 #include <fstream>
 #include <list>
@@ -41,8 +37,6 @@

 namespace gtsam {

-  using boost::assign::operator+=;
-
   /****************************************************************************/
   // Node
   /****************************************************************************/
@@ -535,8 +529,7 @@ namespace gtsam {
   template<typename L, typename Y>
   DecisionTree<L, Y>::DecisionTree(const L& label,
       const DecisionTree& f0, const DecisionTree& f1) {
-    std::vector<DecisionTree> functions;
-    functions += f0, f1;
+    const std::vector<DecisionTree> functions{f0, f1};
     root_ = compose(functions.begin(), functions.end(), label);
   }

@@ -399,13 +399,9 @@ TEST(ADT, factor_graph) {
 /* ************************************************************************* */
 // test equality
 TEST(ADT, equality_noparser) {
-  DiscreteKey A(0, 2), B(1, 2);
-  Signature::Table tableA, tableB;
-  Signature::Row rA, rB;
-  rA += 80, 20;
-  rB += 60, 40;
-  tableA += rA;
-  tableB += rB;
+  const DiscreteKey A(0, 2), B(1, 2);
+  const Signature::Row rA{80, 20}, rB{60, 40};
+  const Signature::Table tableA{rA}, tableB{rB};

   // Check straight equality
   ADT pA1 = create(A % tableA);
@@ -520,9 +516,9 @@ TEST(ADT, elimination) {

     // normalize
     ADT actual = f1 / actualSum;
-    vector<double> cpt;
-    cpt += 1.0 / 3, 2.0 / 3, 3.0 / 7, 4.0 / 7, 5.0 / 11, 6.0 / 11, //
-        1.0 / 9, 8.0 / 9, 3.0 / 6, 3.0 / 6, 5.0 / 10, 5.0 / 10;
+    const vector<double> cpt{
+        1.0 / 3, 2.0 / 3, 3.0 / 7, 4.0 / 7, 5.0 / 11, 6.0 / 11, //
+        1.0 / 9, 8.0 / 9, 3.0 / 6, 3.0 / 6, 5.0 / 10, 5.0 / 10};
     ADT expected(A & B & C, cpt);
     CHECK(assert_equal(expected, actual));
   }
@@ -535,9 +531,9 @@ TEST(ADT, elimination) {

     // normalize
     ADT actual = f1 / actualSum;
-    vector<double> cpt;
-    cpt += 1.0 / 21, 2.0 / 21, 3.0 / 21, 4.0 / 21, 5.0 / 21, 6.0 / 21, //
-        1.0 / 25, 8.0 / 25, 3.0 / 25, 3.0 / 25, 5.0 / 25, 5.0 / 25;
+    const vector<double> cpt{
+        1.0 / 21, 2.0 / 21, 3.0 / 21, 4.0 / 21, 5.0 / 21, 6.0 / 21, //
+        1.0 / 25, 8.0 / 25, 3.0 / 25, 3.0 / 25, 5.0 / 25, 5.0 / 25};
     ADT expected(A & B & C, cpt);
     CHECK(assert_equal(expected, actual));
   }
@@ -26,7 +26,9 @@
 #include <gtsam/discrete/DecisionTree-inl.h>
 #include <gtsam/discrete/Signature.h>

-using namespace std;
+using std::vector;
+using std::string;
+using std::map;
 using namespace gtsam;

 template <typename T>
@@ -280,8 +282,7 @@ TEST(DecisionTree, Compose) {
   DT f1(B, DT(A, 0, 1), DT(A, 2, 3));

   // Create from string
-  vector<DT::LabelC> keys;
-  keys += DT::LabelC(A, 2), DT::LabelC(B, 2);
+  vector<DT::LabelC> keys{DT::LabelC(A, 2), DT::LabelC(B, 2)};
   DT f2(keys, "0 2 1 3");
   EXPECT(assert_equal(f2, f1, 1e-9));

@@ -291,7 +292,7 @@ TEST(DecisionTree, Compose) {
   DOT(f4);

   // a bigger tree
-  keys += DT::LabelC(C, 2);
+  keys.push_back(DT::LabelC(C, 2));
   DT f5(keys, "0 4 2 6 1 5 3 7");
   EXPECT(assert_equal(f5, f4, 1e-9));
   DOT(f5);
@@ -322,7 +323,7 @@ TEST(DecisionTree, Containers) {
 /* ************************************************************************** */
 // Test nrAssignments.
 TEST(DecisionTree, NrAssignments) {
-  pair<string, size_t> A("A", 2), B("B", 2), C("C", 2);
+  const std::pair<string, size_t> A("A", 2), B("B", 2), C("C", 2);
   DT tree({A, B, C}, "1 1 1 1 1 1 1 1");
   EXPECT(tree.root_->isLeaf());
   auto leaf = boost::dynamic_pointer_cast<const DT::Leaf>(tree.root_);
@@ -472,8 +473,8 @@ TEST(DecisionTree, unzip) {
 // Test thresholding.
 TEST(DecisionTree, threshold) {
   // Create three level tree
-  vector<DT::LabelC> keys;
-  keys += DT::LabelC("C", 2), DT::LabelC("B", 2), DT::LabelC("A", 2);
+  const vector<DT::LabelC> keys{DT::LabelC("C", 2), DT::LabelC("B", 2),
+                                DT::LabelC("A", 2)};
   DT tree(keys, "0 1 2 3 4 5 6 7");

   // Check number of leaves equal to zero
@@ -495,8 +496,8 @@ TEST(DecisionTree, threshold) {
 // Test apply with assignment.
 TEST(DecisionTree, ApplyWithAssignment) {
   // Create three level tree
-  vector<DT::LabelC> keys;
-  keys += DT::LabelC("C", 2), DT::LabelC("B", 2), DT::LabelC("A", 2);
+  const vector<DT::LabelC> keys{DT::LabelC("C", 2), DT::LabelC("B", 2),
+                                DT::LabelC("A", 2)};
   DT tree(keys, "1 2 3 4 5 6 7 8");

   DecisionTree<string, double> probTree(
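The DecisionTree tests now build their label/cardinality keys with braces and grow them with push_back. A condensed, self-contained sketch (DecisionTree<std::string, int> stands in for the alias the test file calls DT; the labels are illustrative):

    #include <gtsam/discrete/DecisionTree-inl.h>
    #include <string>
    #include <vector>

    void decisionTreeSketch() {
      using DT = gtsam::DecisionTree<std::string, int>;
      std::vector<DT::LabelC> keys{{"B", 2}, {"A", 2}};  // label plus cardinality
      DT tree(keys, "0 2 1 3");
      keys.push_back(DT::LabelC("C", 2));  // extend with push_back, not boost's +=
    }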
@@ -53,12 +53,8 @@ TEST(DiscreteConditional, constructors) {
 TEST(DiscreteConditional, constructors_alt_interface) {
   DiscreteKey X(0, 2), Y(2, 3), Z(1, 2);  // watch ordering !

-  Signature::Table table;
-  Signature::Row r1, r2, r3;
-  r1 += 1.0, 1.0;
-  r2 += 2.0, 3.0;
-  r3 += 1.0, 4.0;
-  table += r1, r2, r3;
+  const Signature::Row r1{1, 1}, r2{2, 3}, r3{1, 4};
+  const Signature::Table table{r1, r2, r3};
   DiscreteConditional actual1(X, {Y}, table);

   DecisionTreeFactor f1(X & Y, "0.5 0.4 0.2 0.5 0.6 0.8");
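Signature rows and tables are plain vectors, so they brace-initialize directly; a condensed sketch mirroring the updated test:

    #include <gtsam/discrete/DiscreteConditional.h>
    #include <gtsam/discrete/DiscreteKey.h>
    #include <gtsam/discrete/Signature.h>

    void signatureSketch() {
      const gtsam::DiscreteKey X(0, 2), Y(2, 3);
      const gtsam::Signature::Row r1{1, 1}, r2{2, 3}, r3{1, 4};
      const gtsam::Signature::Table table{r1, r2, r3};       // one row per parent assignment
      gtsam::DiscreteConditional conditional(X, {Y}, table);  // P(X | Y)
    }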
@@ -183,8 +183,7 @@ TEST_UNSAFE(DiscreteMarginals, truss2) {
     F[j] /= sum;

     // Marginals
-    vector<double> table;
-    table += F[j], T[j];
+    const vector<double> table{F[j], T[j]};
     DecisionTreeFactor expectedM(key[j], table);
     DiscreteFactor::shared_ptr actualM = marginals(j);
     EXPECT(assert_equal(
@@ -18,12 +18,13 @@

 #pragma once

-#include <gtsam/geometry/Point3.h>
-#include <gtsam/geometry/CalibratedCamera.h> // for Cheirality exception
-#include <gtsam/base/Testable.h>
-#include <gtsam/base/SymmetricBlockMatrix.h>
 #include <gtsam/base/FastMap.h>
+#include <gtsam/base/SymmetricBlockMatrix.h>
+#include <gtsam/base/Testable.h>
+#include <gtsam/geometry/CalibratedCamera.h>  // for Cheirality exception
+#include <gtsam/geometry/Point3.h>
+#include <gtsam/inference/Key.h>

 #include <vector>

 namespace gtsam {

@@ -31,10 +32,10 @@ namespace gtsam {
 /**
  * @brief A set of cameras, all with their own calibration
  */
-template<class CAMERA>
-class CameraSet : public std::vector<CAMERA, Eigen::aligned_allocator<CAMERA> > {
-
-protected:
+template <class CAMERA>
+class CameraSet : public std::vector<CAMERA, Eigen::aligned_allocator<CAMERA>> {
+ protected:
+  using Base = std::vector<CAMERA, typename Eigen::aligned_allocator<CAMERA>>;

   /**
    * 2D measurement and noise model for each of the m views
@@ -43,13 +44,11 @@ protected:
   typedef typename CAMERA::Measurement Z;
   typedef typename CAMERA::MeasurementVector ZVector;

-  static const int D = traits<CAMERA>::dimension; ///< Camera dimension
-  static const int ZDim = traits<Z>::dimension; ///< Measurement dimension
+  static const int D = traits<CAMERA>::dimension;  ///< Camera dimension
+  static const int ZDim = traits<Z>::dimension;    ///< Measurement dimension

   /// Make a vector of re-projection errors
-  static Vector ErrorVector(const ZVector& predicted,
-      const ZVector& measured) {
-
+  static Vector ErrorVector(const ZVector& predicted, const ZVector& measured) {
     // Check size
     size_t m = predicted.size();
     if (measured.size() != m)
@@ -59,7 +58,8 @@ protected:
     Vector b(ZDim * m);
     for (size_t i = 0, row = 0; i < m; i++, row += ZDim) {
       Vector bi = traits<Z>::Local(measured[i], predicted[i]);
-      if(ZDim==3 && std::isnan(bi(1))){ // if it is a stereo point and the right pixel is missing (nan)
+      if (ZDim == 3 && std::isnan(bi(1))) {  // if it is a stereo point and the
+                                             // right pixel is missing (nan)
         bi(1) = 0;
       }
       b.segment<ZDim>(row) = bi;
@@ -67,7 +67,8 @@ protected:
     return b;
   }

-public:
+ public:
+  using Base::Base;  // Inherit the vector constructors

   /// Destructor
   virtual ~CameraSet() = default;
@@ -83,18 +84,15 @@ public:
    */
   virtual void print(const std::string& s = "") const {
     std::cout << s << "CameraSet, cameras = \n";
-    for (size_t k = 0; k < this->size(); ++k)
-      this->at(k).print(s);
+    for (size_t k = 0; k < this->size(); ++k) this->at(k).print(s);
   }

   /// equals
   bool equals(const CameraSet& p, double tol = 1e-9) const {
-    if (this->size() != p.size())
-      return false;
+    if (this->size() != p.size()) return false;
     bool camerasAreEqual = true;
     for (size_t i = 0; i < this->size(); i++) {
-      if (this->at(i).equals(p.at(i), tol) == false)
-        camerasAreEqual = false;
+      if (this->at(i).equals(p.at(i), tol) == false) camerasAreEqual = false;
       break;
     }
     return camerasAreEqual;
@@ -106,11 +104,10 @@ public:
    * matrix this function returns the diagonal blocks.
    * throws CheiralityException
    */
-  template<class POINT>
-  ZVector project2(const POINT& point, //
-      boost::optional<FBlocks&> Fs = boost::none, //
-      boost::optional<Matrix&> E = boost::none) const {
-
+  template <class POINT>
+  ZVector project2(const POINT& point,  //
+                   boost::optional<FBlocks&> Fs = boost::none,  //
+                   boost::optional<Matrix&> E = boost::none) const {
     static const int N = FixedDimension<POINT>::value;

     // Allocate result
@@ -135,19 +132,19 @@ public:
   }

   /// Calculate vector [project2(point)-z] of re-projection errors
-  template<class POINT>
+  template <class POINT>
   Vector reprojectionError(const POINT& point, const ZVector& measured,
-      boost::optional<FBlocks&> Fs = boost::none, //
-      boost::optional<Matrix&> E = boost::none) const {
+                           boost::optional<FBlocks&> Fs = boost::none,  //
+                           boost::optional<Matrix&> E = boost::none) const {
     return ErrorVector(project2(point, Fs, E), measured);
   }

   /**
-   * Do Schur complement, given Jacobian as Fs,E,P, return SymmetricBlockMatrix
-   * G = F' * F - F' * E * P * E' * F
-   * g = F' * (b - E * P * E' * b)
-   * Fixed size version
-   */
+   * Do Schur complement, given Jacobian as Fs,E,P, return SymmetricBlockMatrix
+   * G = F' * F - F' * E * P * E' * F
+   * g = F' * (b - E * P * E' * b)
+   * Fixed size version
+   */
   template <int N,
             int ND>  // N = 2 or 3 (point dimension), ND is the camera dimension
   static SymmetricBlockMatrix SchurComplement(
@@ -158,38 +155,47 @@ public:
     // a single point is observed in m cameras
     size_t m = Fs.size();

-    // Create a SymmetricBlockMatrix (augmented hessian, with extra row/column with info vector)
+    // Create a SymmetricBlockMatrix (augmented hessian, with extra row/column
+    // with info vector)
     size_t M1 = ND * m + 1;
-    std::vector<DenseIndex> dims(m + 1); // this also includes the b term
+    std::vector<DenseIndex> dims(m + 1);  // this also includes the b term
     std::fill(dims.begin(), dims.end() - 1, ND);
     dims.back() = 1;
     SymmetricBlockMatrix augmentedHessian(dims, Matrix::Zero(M1, M1));

     // Blockwise Schur complement
-    for (size_t i = 0; i < m; i++) { // for each camera
-
+    for (size_t i = 0; i < m; i++) {  // for each camera
       const Eigen::Matrix<double, ZDim, ND>& Fi = Fs[i];
       const auto FiT = Fi.transpose();
-      const Eigen::Matrix<double, ZDim, N> Ei_P = //
+      const Eigen::Matrix<double, ZDim, N> Ei_P =  //
           E.block(ZDim * i, 0, ZDim, N) * P;

       // D = (Dx2) * ZDim
-      augmentedHessian.setOffDiagonalBlock(i, m, FiT * b.segment<ZDim>(ZDim * i) // F' * b
-          - FiT * (Ei_P * (E.transpose() * b))); // D = (DxZDim) * (ZDimx3) * (N*ZDimm) * (ZDimm x 1)
+      augmentedHessian.setOffDiagonalBlock(
+          i, m,
+          FiT * b.segment<ZDim>(ZDim * i)  // F' * b
+              -
+              FiT *
+                  (Ei_P *
+                   (E.transpose() *
+                    b)));  // D = (DxZDim) * (ZDimx3) * (N*ZDimm) * (ZDimm x 1)

       // (DxD) = (DxZDim) * ( (ZDimxD) - (ZDimx3) * (3xZDim) * (ZDimxD) )
-      augmentedHessian.setDiagonalBlock(i, FiT
-          * (Fi - Ei_P * E.block(ZDim * i, 0, ZDim, N).transpose() * Fi));
+      augmentedHessian.setDiagonalBlock(
+          i,
+          FiT * (Fi - Ei_P * E.block(ZDim * i, 0, ZDim, N).transpose() * Fi));

       // upper triangular part of the hessian
-      for (size_t j = i + 1; j < m; j++) { // for each camera
+      for (size_t j = i + 1; j < m; j++) {  // for each camera
         const Eigen::Matrix<double, ZDim, ND>& Fj = Fs[j];

         // (DxD) = (Dx2) * ( (2x2) * (2xD) )
-        augmentedHessian.setOffDiagonalBlock(i, j, -FiT
-            * (Ei_P * E.block(ZDim * j, 0, ZDim, N).transpose() * Fj));
+        augmentedHessian.setOffDiagonalBlock(
+            i, j,
+            -FiT * (Ei_P * E.block(ZDim * j, 0, ZDim, N).transpose() * Fj));
       }
-    } // end of for over cameras
+    }  // end of for over cameras

     augmentedHessian.diagonalBlock(m)(0, 0) += b.squaredNorm();
     return augmentedHessian;
@@ -297,20 +303,21 @@ public:
    * g = F' * (b - E * P * E' * b)
    * Fixed size version
    */
-  template<int N> // N = 2 or 3
-  static SymmetricBlockMatrix SchurComplement(const FBlocks& Fs,
-      const Matrix& E, const Eigen::Matrix<double, N, N>& P, const Vector& b) {
-    return SchurComplement<N,D>(Fs, E, P, b);
+  template <int N>  // N = 2 or 3
+  static SymmetricBlockMatrix SchurComplement(
+      const FBlocks& Fs, const Matrix& E, const Eigen::Matrix<double, N, N>& P,
+      const Vector& b) {
+    return SchurComplement<N, D>(Fs, E, P, b);
   }

   /// Computes Point Covariance P, with lambda parameter
-  template<int N> // N = 2 or 3 (point dimension)
+  template <int N>  // N = 2 or 3 (point dimension)
   static void ComputePointCovariance(Eigen::Matrix<double, N, N>& P,
-      const Matrix& E, double lambda, bool diagonalDamping = false) {
-
+                                     const Matrix& E, double lambda,
+                                     bool diagonalDamping = false) {
     Matrix EtE = E.transpose() * E;

-    if (diagonalDamping) { // diagonal of the hessian
+    if (diagonalDamping) {  // diagonal of the hessian
       EtE.diagonal() += lambda * EtE.diagonal();
     } else {
       DenseIndex n = E.cols();
@@ -322,7 +329,7 @@ public:

   /// Computes Point Covariance P, with lambda parameter, dynamic version
   static Matrix PointCov(const Matrix& E, const double lambda = 0.0,
-      bool diagonalDamping = false) {
+                         bool diagonalDamping = false) {
     if (E.cols() == 2) {
       Matrix2 P2;
       ComputePointCovariance<2>(P2, E, lambda, diagonalDamping);
@@ -339,8 +346,9 @@ public:
    * Dynamic version
    */
   static SymmetricBlockMatrix SchurComplement(const FBlocks& Fblocks,
-      const Matrix& E, const Vector& b, const double lambda = 0.0,
-      bool diagonalDamping = false) {
+                                              const Matrix& E, const Vector& b,
+                                              const double lambda = 0.0,
+                                              bool diagonalDamping = false) {
     if (E.cols() == 2) {
       Matrix2 P;
       ComputePointCovariance<2>(P, E, lambda, diagonalDamping);
@@ -353,17 +361,17 @@ public:
   }

   /**
-   * Applies Schur complement (exploiting block structure) to get a smart factor on cameras,
-   * and adds the contribution of the smart factor to a pre-allocated augmented Hessian.
+   * Applies Schur complement (exploiting block structure) to get a smart factor
+   * on cameras, and adds the contribution of the smart factor to a
+   * pre-allocated augmented Hessian.
    */
-  template<int N> // N = 2 or 3 (point dimension)
-  static void UpdateSchurComplement(const FBlocks& Fs, const Matrix& E,
-      const Eigen::Matrix<double, N, N>& P, const Vector& b,
-      const KeyVector& allKeys, const KeyVector& keys,
-      /*output ->*/SymmetricBlockMatrix& augmentedHessian) {
-
-    assert(keys.size()==Fs.size());
-    assert(keys.size()<=allKeys.size());
+  template <int N>  // N = 2 or 3 (point dimension)
+  static void UpdateSchurComplement(
+      const FBlocks& Fs, const Matrix& E, const Eigen::Matrix<double, N, N>& P,
+      const Vector& b, const KeyVector& allKeys, const KeyVector& keys,
+      /*output ->*/ SymmetricBlockMatrix& augmentedHessian) {
+    assert(keys.size() == Fs.size());
+    assert(keys.size() <= allKeys.size());

     FastMap<Key, size_t> KeySlotMap;
     for (size_t slot = 0; slot < allKeys.size(); slot++)
@@ -374,39 +382,49 @@ public:
     // g = F' * (b - E * P * E' * b)

     // a single point is observed in m cameras
-    size_t m = Fs.size(); // cameras observing current point
-    size_t M = (augmentedHessian.rows() - 1) / D; // all cameras in the group
-    assert(allKeys.size()==M);
+    size_t m = Fs.size();  // cameras observing current point
+    size_t M = (augmentedHessian.rows() - 1) / D;  // all cameras in the group
+    assert(allKeys.size() == M);

     // Blockwise Schur complement
-    for (size_t i = 0; i < m; i++) { // for each camera in the current factor
-
+    for (size_t i = 0; i < m; i++) {  // for each camera in the current factor
       const MatrixZD& Fi = Fs[i];
       const auto FiT = Fi.transpose();
-      const Eigen::Matrix<double, 2, N> Ei_P = E.template block<ZDim, N>(
-          ZDim * i, 0) * P;
+      const Eigen::Matrix<double, 2, N> Ei_P =
+          E.template block<ZDim, N>(ZDim * i, 0) * P;

       // D = (DxZDim) * (ZDim)
       // allKeys are the list of all camera keys in the group, e.g, (1,3,4,5,7)
-      // we should map those to a slot in the local (grouped) hessian (0,1,2,3,4)
-      // Key cameraKey_i = this->keys_[i];
+      // we should map those to a slot in the local (grouped) hessian
+      // (0,1,2,3,4) Key cameraKey_i = this->keys_[i];
       DenseIndex aug_i = KeySlotMap.at(keys[i]);

       // information vector - store previous vector
       // vectorBlock = augmentedHessian(aug_i, aug_m).knownOffDiagonal();
       // add contribution of current factor
-      augmentedHessian.updateOffDiagonalBlock(aug_i, M,
-          FiT * b.segment<ZDim>(ZDim * i) // F' * b
-          - FiT * (Ei_P * (E.transpose() * b))); // D = (DxZDim) * (ZDimx3) * (N*ZDimm) * (ZDimm x 1)
+      augmentedHessian.updateOffDiagonalBlock(
+          aug_i, M,
+          FiT * b.segment<ZDim>(ZDim * i)  // F' * b
+              -
+              FiT *
+                  (Ei_P *
+                   (E.transpose() *
+                    b)));  // D = (DxZDim) * (ZDimx3) * (N*ZDimm) * (ZDimm x 1)

       // (DxD) += (DxZDim) * ( (ZDimxD) - (ZDimx3) * (3xZDim) * (ZDimxD) )
       // add contribution of current factor
-      // TODO(gareth): Eigen doesn't let us pass the expression. Call eval() for now...
-      augmentedHessian.updateDiagonalBlock(aug_i,
-          ((FiT * (Fi - Ei_P * E.template block<ZDim, N>(ZDim * i, 0).transpose() * Fi))).eval());
+      // TODO(gareth): Eigen doesn't let us pass the expression. Call eval() for
+      // now...
+      augmentedHessian.updateDiagonalBlock(
+          aug_i,
+          ((FiT *
+            (Fi -
+             Ei_P * E.template block<ZDim, N>(ZDim * i, 0).transpose() * Fi)))
+              .eval());

       // upper triangular part of the hessian
-      for (size_t j = i + 1; j < m; j++) { // for each camera
+      for (size_t j = i + 1; j < m; j++) {  // for each camera
         const MatrixZD& Fj = Fs[j];

         DenseIndex aug_j = KeySlotMap.at(keys[j]);
@@ -415,39 +433,38 @@ public:
         // off diagonal block - store previous block
         // matrixBlock = augmentedHessian(aug_i, aug_j).knownOffDiagonal();
         // add contribution of current factor
-        augmentedHessian.updateOffDiagonalBlock(aug_i, aug_j,
-            -FiT * (Ei_P * E.template block<ZDim, N>(ZDim * j, 0).transpose() * Fj));
+        augmentedHessian.updateOffDiagonalBlock(
+            aug_i, aug_j,
+            -FiT * (Ei_P * E.template block<ZDim, N>(ZDim * j, 0).transpose() *
+                    Fj));
       }
-    } // end of for over cameras
+    }  // end of for over cameras

     augmentedHessian.diagonalBlock(M)(0, 0) += b.squaredNorm();
   }

-private:
-
+ private:
   /// Serialization function
   friend class boost::serialization::access;
-  template<class ARCHIVE>
-  void serialize(ARCHIVE & ar, const unsigned int /*version*/) {
-    ar & (*this);
+  template <class ARCHIVE>
+  void serialize(ARCHIVE& ar, const unsigned int /*version*/) {
+    ar&(*this);
   }

-public:
+ public:
   GTSAM_MAKE_ALIGNED_OPERATOR_NEW
 };

-template<class CAMERA>
+template <class CAMERA>
 const int CameraSet<CAMERA>::D;

-template<class CAMERA>
+template <class CAMERA>
 const int CameraSet<CAMERA>::ZDim;

-template<class CAMERA>
-struct traits<CameraSet<CAMERA> > : public Testable<CameraSet<CAMERA> > {
-};
+template <class CAMERA>
+struct traits<CameraSet<CAMERA>> : public Testable<CameraSet<CAMERA>> {};

-template<class CAMERA>
-struct traits<const CameraSet<CAMERA> > : public Testable<CameraSet<CAMERA> > {
-};
+template <class CAMERA>
+struct traits<const CameraSet<CAMERA>> : public Testable<CameraSet<CAMERA>> {};

-} // \ namespace gtsam
+}  // namespace gtsam
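The Schur-complement comments above, written out as math. Here F stacks the per-camera Jacobian blocks Fs, E is the point Jacobian, b the stacked reprojection error, and P the point covariance produced by ComputePointCovariance (the inverse of E'E, optionally damped by lambda):

    \[
      P = (E^\top E)^{-1}, \qquad
      G = F^\top F - F^\top E \, P \, E^\top F, \qquad
      g = F^\top \bigl(b - E \, P \, E^\top b\bigr)
    \]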
@@ -20,12 +20,9 @@
 #include <gtsam/geometry/OrientedPlane3.h>
 #include <gtsam/base/numericalDerivative.h>
 #include <CppUnitLite/TestHarness.h>
-#include <boost/assign/std/vector.hpp>

-using namespace boost::assign;
 using namespace std::placeholders;
 using namespace gtsam;
 using namespace std;
-using boost::none;

 GTSAM_CONCEPT_TESTABLE_INST(OrientedPlane3)
@@ -23,12 +23,10 @@
 #include <gtsam/geometry/Pose2.h>
 #include <gtsam/geometry/Rot2.h>

-#include <boost/assign/std/vector.hpp> // for operator +=
 #include <boost/optional.hpp>
 #include <cmath>
 #include <iostream>

-using namespace boost::assign;
 using namespace gtsam;
 using namespace std;

@@ -749,11 +747,10 @@ namespace align_3 {
 TEST(Pose2, align_3) {
   using namespace align_3;

-  Point2Pairs ab_pairs;
   Point2Pair ab1(make_pair(a1, b1));
   Point2Pair ab2(make_pair(a2, b2));
   Point2Pair ab3(make_pair(a3, b3));
-  ab_pairs += ab1, ab2, ab3;
+  const Point2Pairs ab_pairs{ab1, ab2, ab3};

   boost::optional<Pose2> aTb = Pose2::Align(ab_pairs);
   EXPECT(assert_equal(expected, *aTb));
@@ -778,9 +775,7 @@ namespace {
 TEST(Pose2, align_4) {
   using namespace align_3;

-  Point2Vector as, bs;
-  as += a1, a2, a3;
-  bs += b3, b1, b2; // note in 3,1,2 order !
+  Point2Vector as{a1, a2, a3}, bs{b3, b1, b2};  // note in 3,1,2 order !

   Triangle t1; t1.i_=0; t1.j_=1; t1.k_=2;
   Triangle t2; t2.i_=1; t2.j_=2; t2.k_=0;
@@ -20,15 +20,13 @@
 #include <gtsam/base/lieProxies.h>
 #include <gtsam/base/TestableAssertions.h>

-#include <boost/assign/std/vector.hpp> // for operator +=
-using namespace boost::assign;
-using namespace std::placeholders;

 #include <CppUnitLite/TestHarness.h>
 #include <cmath>

 using namespace std;
 using namespace gtsam;
+using namespace std::placeholders;

 GTSAM_CONCEPT_TESTABLE_INST(Pose3)
 GTSAM_CONCEPT_LIE_INST(Pose3)
@@ -809,11 +807,10 @@ TEST( Pose3, adjointMap) {
 TEST(Pose3, Align1) {
   Pose3 expected(Rot3(), Point3(10,10,0));

-  vector<Point3Pair> correspondences;
-  Point3Pair ab1(make_pair(Point3(10,10,0), Point3(0,0,0)));
-  Point3Pair ab2(make_pair(Point3(30,20,0), Point3(20,10,0)));
-  Point3Pair ab3(make_pair(Point3(20,30,0), Point3(10,20,0)));
-  correspondences += ab1, ab2, ab3;
+  Point3Pair ab1(Point3(10,10,0), Point3(0,0,0));
+  Point3Pair ab2(Point3(30,20,0), Point3(20,10,0));
+  Point3Pair ab3(Point3(20,30,0), Point3(10,20,0));
+  const vector<Point3Pair> correspondences{ab1, ab2, ab3};

   boost::optional<Pose3> actual = Pose3::Align(correspondences);
   EXPECT(assert_equal(expected, *actual));
@@ -825,15 +822,12 @@ TEST(Pose3, Align2) {
   Rot3 R = Rot3::RzRyRx(0.3, 0.2, 0.1);
   Pose3 expected(R, t);

-  vector<Point3Pair> correspondences;
   Point3 p1(0,0,1), p2(10,0,2), p3(20,-10,30);
   Point3 q1 = expected.transformFrom(p1),
          q2 = expected.transformFrom(p2),
          q3 = expected.transformFrom(p3);
-  Point3Pair ab1(make_pair(q1, p1));
-  Point3Pair ab2(make_pair(q2, p2));
-  Point3Pair ab3(make_pair(q3, p3));
-  correspondences += ab1, ab2, ab3;
+  const Point3Pair ab1{q1, p1}, ab2{q2, p2}, ab3{q3, p3};
+  const vector<Point3Pair> correspondences{ab1, ab2, ab3};

   boost::optional<Pose3> actual = Pose3::Align(correspondences);
   EXPECT(assert_equal(expected, *actual, 1e-5));
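Pose3::Align now takes a brace-initialized vector of Point3Pair correspondences, as in the updated tests; a condensed sketch (the points are the same ones used in Align1):

    #include <gtsam/geometry/Pose3.h>
    #include <boost/optional.hpp>
    #include <vector>

    void alignSketch() {
      using gtsam::Point3;
      const gtsam::Point3Pair ab1(Point3(10, 10, 0), Point3(0, 0, 0));
      const gtsam::Point3Pair ab2(Point3(30, 20, 0), Point3(20, 10, 0));
      const gtsam::Point3Pair ab3(Point3(20, 30, 0), Point3(10, 20, 0));
      const std::vector<gtsam::Point3Pair> correspondences{ab1, ab2, ab3};
      boost::optional<gtsam::Pose3> aTb = gtsam::Pose3::Align(correspondences);
      // *aTb maps the second member of each pair onto the first
    }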
@@ -30,12 +30,8 @@
 #include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
 #include <gtsam/slam/StereoFactor.h>

-#include <boost/assign.hpp>
-#include <boost/assign/std/vector.hpp>
-
 using namespace std;
 using namespace gtsam;
-using namespace boost::assign;

 // Some common constants

@@ -51,34 +47,34 @@ static const PinholeCamera<Cal3_S2> kCamera1(kPose1, *kSharedCal);
 static const Pose3 kPose2 = kPose1 * Pose3(Rot3(), Point3(1, 0, 0));
 static const PinholeCamera<Cal3_S2> kCamera2(kPose2, *kSharedCal);

-// landmark ~5 meters infront of camera
+static const std::vector<Pose3> kPoses = {kPose1, kPose2};
+
+// landmark ~5 meters in front of camera
 static const Point3 kLandmark(5, 0.5, 1.2);

 // 1. Project two landmarks into two cameras and triangulate
 static const Point2 kZ1 = kCamera1.project(kLandmark);
 static const Point2 kZ2 = kCamera2.project(kLandmark);
+static const Point2Vector kMeasurements{kZ1, kZ2};

 //******************************************************************************
 // Simple test with a well-behaved two camera situation
 TEST(triangulation, twoPoses) {
-  vector<Pose3> poses;
-  Point2Vector measurements;
-
-  poses += kPose1, kPose2;
-  measurements += kZ1, kZ2;
+  Point2Vector measurements = kMeasurements;

   double rank_tol = 1e-9;

   // 1. Test simple DLT, perfect in no noise situation
   bool optimize = false;
   boost::optional<Point3> actual1 =  //
-      triangulatePoint3<Cal3_S2>(poses, kSharedCal, measurements, rank_tol, optimize);
+      triangulatePoint3<Cal3_S2>(kPoses, kSharedCal, measurements, rank_tol, optimize);
   EXPECT(assert_equal(kLandmark, *actual1, 1e-7));

   // 2. test with optimization on, same answer
   optimize = true;
   boost::optional<Point3> actual2 =  //
-      triangulatePoint3<Cal3_S2>(poses, kSharedCal, measurements, rank_tol, optimize);
+      triangulatePoint3<Cal3_S2>(kPoses, kSharedCal, measurements, rank_tol, optimize);
   EXPECT(assert_equal(kLandmark, *actual2, 1e-7));

   // 3. Add some noise and try again: result should be ~ (4.995,
@@ -87,13 +83,13 @@ TEST(triangulation, twoPoses) {
   measurements.at(1) += Point2(-0.2, 0.3);
   optimize = false;
   boost::optional<Point3> actual3 =  //
-      triangulatePoint3<Cal3_S2>(poses, kSharedCal, measurements, rank_tol, optimize);
+      triangulatePoint3<Cal3_S2>(kPoses, kSharedCal, measurements, rank_tol, optimize);
   EXPECT(assert_equal(Point3(4.995, 0.499167, 1.19814), *actual3, 1e-4));

   // 4. Now with optimization on
   optimize = true;
   boost::optional<Point3> actual4 =  //
-      triangulatePoint3<Cal3_S2>(poses, kSharedCal, measurements, rank_tol, optimize);
+      triangulatePoint3<Cal3_S2>(kPoses, kSharedCal, measurements, rank_tol, optimize);
   EXPECT(assert_equal(Point3(4.995, 0.499167, 1.19814), *actual4, 1e-4));
 }

@@ -102,7 +98,7 @@ TEST(triangulation, twoCamerasUsingLOST) {
   cameras.push_back(kCamera1);
   cameras.push_back(kCamera2);

-  Point2Vector measurements = {kZ1, kZ2};
+  Point2Vector measurements = kMeasurements;
   SharedNoiseModel measurementNoise = noiseModel::Isotropic::Sigma(2, 1e-4);
   double rank_tol = 1e-9;

@@ -175,25 +171,21 @@ TEST(triangulation, twoPosesCal3DS2) {
   Point2 z1Distorted = camera1Distorted.project(kLandmark);
   Point2 z2Distorted = camera2Distorted.project(kLandmark);

-  vector<Pose3> poses;
-  Point2Vector measurements;
-
-  poses += kPose1, kPose2;
-  measurements += z1Distorted, z2Distorted;
+  Point2Vector measurements{z1Distorted, z2Distorted};

   double rank_tol = 1e-9;

   // 1. Test simple DLT, perfect in no noise situation
   bool optimize = false;
   boost::optional<Point3> actual1 =  //
-      triangulatePoint3<Cal3DS2>(poses, sharedDistortedCal, measurements,
+      triangulatePoint3<Cal3DS2>(kPoses, sharedDistortedCal, measurements,
                                  rank_tol, optimize);
   EXPECT(assert_equal(kLandmark, *actual1, 1e-7));

   // 2. test with optimization on, same answer
   optimize = true;
   boost::optional<Point3> actual2 =  //
-      triangulatePoint3<Cal3DS2>(poses, sharedDistortedCal, measurements,
+      triangulatePoint3<Cal3DS2>(kPoses, sharedDistortedCal, measurements,
                                  rank_tol, optimize);
   EXPECT(assert_equal(kLandmark, *actual2, 1e-7));

@@ -203,14 +195,14 @@ TEST(triangulation, twoPosesCal3DS2) {
   measurements.at(1) += Point2(-0.2, 0.3);
   optimize = false;
   boost::optional<Point3> actual3 =  //
-      triangulatePoint3<Cal3DS2>(poses, sharedDistortedCal, measurements,
+      triangulatePoint3<Cal3DS2>(kPoses, sharedDistortedCal, measurements,
                                  rank_tol, optimize);
   EXPECT(assert_equal(Point3(4.995, 0.499167, 1.19814), *actual3, 1e-3));

   // 4. Now with optimization on
   optimize = true;
   boost::optional<Point3> actual4 =  //
-      triangulatePoint3<Cal3DS2>(poses, sharedDistortedCal, measurements,
+      triangulatePoint3<Cal3DS2>(kPoses, sharedDistortedCal, measurements,
                                  rank_tol, optimize);
   EXPECT(assert_equal(Point3(4.995, 0.499167, 1.19814), *actual4, 1e-3));
 }
@@ -232,25 +224,21 @@ TEST(triangulation, twoPosesFisheye) {
   Point2 z1Distorted = camera1Distorted.project(kLandmark);
   Point2 z2Distorted = camera2Distorted.project(kLandmark);

-  vector<Pose3> poses;
-  Point2Vector measurements;
-
-  poses += kPose1, kPose2;
-  measurements += z1Distorted, z2Distorted;
+  Point2Vector measurements{z1Distorted, z2Distorted};

   double rank_tol = 1e-9;

   // 1. Test simple DLT, perfect in no noise situation
   bool optimize = false;
   boost::optional<Point3> actual1 =  //
-      triangulatePoint3<Calibration>(poses, sharedDistortedCal, measurements,
+      triangulatePoint3<Calibration>(kPoses, sharedDistortedCal, measurements,
                                      rank_tol, optimize);
   EXPECT(assert_equal(kLandmark, *actual1, 1e-7));

   // 2. test with optimization on, same answer
   optimize = true;
   boost::optional<Point3> actual2 =  //
-      triangulatePoint3<Calibration>(poses, sharedDistortedCal, measurements,
+      triangulatePoint3<Calibration>(kPoses, sharedDistortedCal, measurements,
                                      rank_tol, optimize);
   EXPECT(assert_equal(kLandmark, *actual2, 1e-7));

@@ -260,14 +248,14 @@ TEST(triangulation, twoPosesFisheye) {
   measurements.at(1) += Point2(-0.2, 0.3);
   optimize = false;
   boost::optional<Point3> actual3 =  //
-      triangulatePoint3<Calibration>(poses, sharedDistortedCal, measurements,
+      triangulatePoint3<Calibration>(kPoses, sharedDistortedCal, measurements,
                                      rank_tol, optimize);
   EXPECT(assert_equal(Point3(4.995, 0.499167, 1.19814), *actual3, 1e-3));

   // 4. Now with optimization on
   optimize = true;
   boost::optional<Point3> actual4 =  //
-      triangulatePoint3<Calibration>(poses, sharedDistortedCal, measurements,
+      triangulatePoint3<Calibration>(kPoses, sharedDistortedCal, measurements,
                                      rank_tol, optimize);
   EXPECT(assert_equal(Point3(4.995, 0.499167, 1.19814), *actual4, 1e-3));
 }
@@ -284,17 +272,13 @@ TEST(triangulation, twoPosesBundler) {
   Point2 z1 = camera1.project(kLandmark);
   Point2 z2 = camera2.project(kLandmark);

-  vector<Pose3> poses;
-  Point2Vector measurements;
-
-  poses += kPose1, kPose2;
-  measurements += z1, z2;
+  Point2Vector measurements{z1, z2};

   bool optimize = true;
   double rank_tol = 1e-9;

   boost::optional<Point3> actual =  //
-      triangulatePoint3<Cal3Bundler>(poses, bundlerCal, measurements, rank_tol,
+      triangulatePoint3<Cal3Bundler>(kPoses, bundlerCal, measurements, rank_tol,
                                      optimize);
   EXPECT(assert_equal(kLandmark, *actual, 1e-7));

@@ -303,19 +287,15 @@ TEST(triangulation, twoPosesBundler) {
   measurements.at(1) += Point2(-0.2, 0.3);

   boost::optional<Point3> actual2 =  //
-      triangulatePoint3<Cal3Bundler>(poses, bundlerCal, measurements, rank_tol,
+      triangulatePoint3<Cal3Bundler>(kPoses, bundlerCal, measurements, rank_tol,
                                      optimize);
   EXPECT(assert_equal(Point3(4.995, 0.499167, 1.19847), *actual2, 1e-3));
 }

 //******************************************************************************
 TEST(triangulation, fourPoses) {
-  vector<Pose3> poses;
-  Point2Vector measurements;
-
-  poses += kPose1, kPose2;
-  measurements += kZ1, kZ2;
-
+  Pose3Vector poses = kPoses;
+  Point2Vector measurements = kMeasurements;
   boost::optional<Point3> actual =
       triangulatePoint3<Cal3_S2>(poses, kSharedCal, measurements);
   EXPECT(assert_equal(kLandmark, *actual, 1e-2));
@@ -334,8 +314,8 @@ TEST(triangulation, fourPoses) {
   PinholeCamera<Cal3_S2> camera3(pose3, *kSharedCal);
   Point2 z3 = camera3.project(kLandmark);

-  poses += pose3;
-  measurements += z3 + Point2(0.1, -0.1);
+  poses.push_back(pose3);
+  measurements.push_back(z3 + Point2(0.1, -0.1));

   boost::optional<Point3> triangulated_3cameras =  //
       triangulatePoint3<Cal3_S2>(poses, kSharedCal, measurements);
@@ -353,8 +333,8 @@ TEST(triangulation, fourPoses) {
 #ifdef GTSAM_THROW_CHEIRALITY_EXCEPTION
   CHECK_EXCEPTION(camera4.project(kLandmark), CheiralityException);

-  poses += pose4;
-  measurements += Point2(400, 400);
+  poses.push_back(pose4);
+  measurements.emplace_back(400, 400);

   CHECK_EXCEPTION(triangulatePoint3<Cal3_S2>(poses, kSharedCal, measurements),
                   TriangulationCheiralityException);
@@ -368,10 +348,8 @@ TEST(triangulation, threePoses_robustNoiseModel) {
   PinholeCamera<Cal3_S2> camera3(pose3, *kSharedCal);
   Point2 z3 = camera3.project(kLandmark);

-  vector<Pose3> poses;
-  Point2Vector measurements;
-  poses += kPose1, kPose2, pose3;
-  measurements += kZ1, kZ2, z3;
+  const vector<Pose3> poses{kPose1, kPose2, pose3};
+  Point2Vector measurements{kZ1, kZ2, z3};

   // noise free, so should give exactly the landmark
   boost::optional<Point3> actual =
@@ -410,10 +388,9 @@ TEST(triangulation, fourPoses_robustNoiseModel) {
   PinholeCamera<Cal3_S2> camera3(pose3, *kSharedCal);
   Point2 z3 = camera3.project(kLandmark);

-  vector<Pose3> poses;
-  Point2Vector measurements;
-  poses += kPose1, kPose1, kPose2, pose3; // 2 measurements from pose 1
-  measurements += kZ1, kZ1, kZ2, z3;
+  const vector<Pose3> poses{kPose1, kPose1, kPose2, pose3};
+  // 2 measurements from pose 1:
+  Point2Vector measurements{kZ1, kZ1, kZ2, z3};

   // noise free, so should give exactly the landmark
   boost::optional<Point3> actual =
@@ -463,11 +440,8 @@ TEST(triangulation, fourPoses_distinct_Ks) {
   Point2 z1 = camera1.project(kLandmark);
   Point2 z2 = camera2.project(kLandmark);

-  CameraSet<PinholeCamera<Cal3_S2>> cameras;
-  Point2Vector measurements;
-
-  cameras += camera1, camera2;
-  measurements += z1, z2;
+  CameraSet<PinholeCamera<Cal3_S2>> cameras{camera1, camera2};
+  Point2Vector measurements{z1, z2};

   boost::optional<Point3> actual =  //
       triangulatePoint3<Cal3_S2>(cameras, measurements);
@@ -488,8 +462,8 @@ TEST(triangulation, fourPoses_distinct_Ks) {
   PinholeCamera<Cal3_S2> camera3(pose3, K3);
   Point2 z3 = camera3.project(kLandmark);

-  cameras += camera3;
-  measurements += z3 + Point2(0.1, -0.1);
+  cameras.push_back(camera3);
+  measurements.push_back(z3 + Point2(0.1, -0.1));

   boost::optional<Point3> triangulated_3cameras =  //
       triangulatePoint3<Cal3_S2>(cameras, measurements);
@@ -508,8 +482,8 @@ TEST(triangulation, fourPoses_distinct_Ks) {
 #ifdef GTSAM_THROW_CHEIRALITY_EXCEPTION
   CHECK_EXCEPTION(camera4.project(kLandmark), CheiralityException);

-  cameras += camera4;
-  measurements += Point2(400, 400);
+  cameras.push_back(camera4);
+  measurements.emplace_back(400, 400);
   CHECK_EXCEPTION(triangulatePoint3<Cal3_S2>(cameras, measurements),
                   TriangulationCheiralityException);
 #endif
@@ -529,11 +503,8 @@ TEST(triangulation, fourPoses_distinct_Ks_distortion) {
   Point2 z1 = camera1.project(kLandmark);
   Point2 z2 = camera2.project(kLandmark);

-  CameraSet<PinholeCamera<Cal3DS2>> cameras;
-  Point2Vector measurements;
-
-  cameras += camera1, camera2;
-  measurements += z1, z2;
+  const CameraSet<PinholeCamera<Cal3DS2>> cameras{camera1, camera2};
+  const Point2Vector measurements{z1, z2};

   boost::optional<Point3> actual =  //
       triangulatePoint3<Cal3DS2>(cameras, measurements);
@@ -554,11 +525,8 @@ TEST(triangulation, outliersAndFarLandmarks) {
   Point2 z1 = camera1.project(kLandmark);
   Point2 z2 = camera2.project(kLandmark);

-  CameraSet<PinholeCamera<Cal3_S2>> cameras;
-  Point2Vector measurements;
-
-  cameras += camera1, camera2;
-  measurements += z1, z2;
+  CameraSet<PinholeCamera<Cal3_S2>> cameras{camera1, camera2};
+  Point2Vector measurements{z1, z2};

   double landmarkDistanceThreshold = 10;  // landmark is closer than that
   TriangulationParameters params(
@@ -582,8 +550,8 @@ TEST(triangulation, outliersAndFarLandmarks) {
   PinholeCamera<Cal3_S2> camera3(pose3, K3);
   Point2 z3 = camera3.project(kLandmark);

-  cameras += camera3;
-  measurements += z3 + Point2(10, -10);
+  cameras.push_back(camera3);
+  measurements.push_back(z3 + Point2(10, -10));

   landmarkDistanceThreshold = 10;  // landmark is closer than that
   double outlierThreshold = 100;   // loose, the outlier is going to pass
@@ -608,11 +576,8 @@ TEST(triangulation, twoIdenticalPoses) {
   // 1. Project two landmarks into two cameras and triangulate
   Point2 z1 = camera1.project(kLandmark);

-  vector<Pose3> poses;
-  Point2Vector measurements;
-
-  poses += kPose1, kPose1;
-  measurements += z1, z1;
+  const vector<Pose3> poses{kPose1, kPose1};
+  const Point2Vector measurements{z1, z1};

   CHECK_EXCEPTION(triangulatePoint3<Cal3_S2>(poses, kSharedCal, measurements),
                   TriangulationUnderconstrainedException);
@@ -623,22 +588,19 @@ TEST(triangulation, onePose) {
   // we expect this test to fail with a TriangulationUnderconstrainedException
   // because there's only one camera observation

-  vector<Pose3> poses;
-  Point2Vector measurements;
-
-  poses += Pose3();
-  measurements += Point2(0, 0);
+  const vector<Pose3> poses{Pose3()};
+  const Point2Vector measurements {{0,0}};

   CHECK_EXCEPTION(triangulatePoint3<Cal3_S2>(poses, kSharedCal, measurements),
                   TriangulationUnderconstrainedException);
 }

 //******************************************************************************
-TEST(triangulation, StereotriangulateNonlinear) {
+TEST(triangulation, StereoTriangulateNonlinear) {
   auto stereoK = boost::make_shared<Cal3_S2Stereo>(1733.75, 1733.75, 0, 689.645,
                                                    508.835, 0.0699612);

-  // two camera poses m1, m2
+  // two camera kPoses m1, m2
   Matrix4 m1, m2;
m1 << 0.796888717, 0.603404026, -0.0295271487, 46.6673779, 0.592783835,
|
||||
-0.77156583, 0.230856632, 66.2186159, 0.116517574, -0.201470143,
|
||||
|
@ -648,14 +610,12 @@ TEST(triangulation, StereotriangulateNonlinear) {
|
|||
0.947083213, 0.131587097, 65.843136, -0.0206094928, 0.131334858,
|
||||
-0.991123524, -4.3525033, 0, 0, 0, 1;
|
||||
|
||||
typedef CameraSet<StereoCamera> Cameras;
|
||||
Cameras cameras;
|
||||
cameras.push_back(StereoCamera(Pose3(m1), stereoK));
|
||||
cameras.push_back(StereoCamera(Pose3(m2), stereoK));
|
||||
typedef CameraSet<StereoCamera> StereoCameras;
|
||||
const StereoCameras cameras{{Pose3(m1), stereoK}, {Pose3(m2), stereoK}};
|
||||
|
||||
StereoPoint2Vector measurements;
|
||||
measurements += StereoPoint2(226.936, 175.212, 424.469);
|
||||
measurements += StereoPoint2(339.571, 285.547, 669.973);
|
||||
measurements.push_back(StereoPoint2(226.936, 175.212, 424.469));
|
||||
measurements.push_back(StereoPoint2(339.571, 285.547, 669.973));
|
||||
|
||||
Point3 initial =
|
||||
Point3(46.0536958, 66.4621179, -6.56285929); // error: 96.5715555191
|
||||
|
@ -741,8 +701,6 @@ TEST(triangulation, StereotriangulateNonlinear) {
|
|||
//******************************************************************************
|
||||
// Simple test with a well-behaved two camera situation
|
||||
TEST(triangulation, twoPoses_sphericalCamera) {
|
||||
vector<Pose3> poses;
|
||||
std::vector<Unit3> measurements;
|
||||
|
||||
// Project landmark into two cameras and triangulate
|
||||
SphericalCamera cam1(kPose1);
|
||||
|
@ -750,8 +708,7 @@ TEST(triangulation, twoPoses_sphericalCamera) {
|
|||
Unit3 u1 = cam1.project(kLandmark);
|
||||
Unit3 u2 = cam2.project(kLandmark);
|
||||
|
||||
poses += kPose1, kPose2;
|
||||
measurements += u1, u2;
|
||||
std::vector<Unit3> measurements{u1, u2};
|
||||
|
||||
CameraSet<SphericalCamera> cameras;
|
||||
cameras.push_back(cam1);
|
||||
|
@ -803,9 +760,6 @@ TEST(triangulation, twoPoses_sphericalCamera) {
|
|||
|
||||
//******************************************************************************
|
||||
TEST(triangulation, twoPoses_sphericalCamera_extremeFOV) {
|
||||
vector<Pose3> poses;
|
||||
std::vector<Unit3> measurements;
|
||||
|
||||
// Project landmark into two cameras and triangulate
|
||||
Pose3 poseA = Pose3(
|
||||
Rot3::Ypr(-M_PI / 2, 0., -M_PI / 2),
|
||||
|
@ -825,8 +779,7 @@ TEST(triangulation, twoPoses_sphericalCamera_extremeFOV) {
|
|||
EXPECT(assert_equal(Unit3(Point3(1.0, 0.0, -1.0)), u2,
|
||||
1e-7)); // behind and to the right of PoseB
|
||||
|
||||
poses += kPose1, kPose2;
|
||||
measurements += u1, u2;
|
||||
const std::vector<Unit3> measurements{u1, u2};
|
||||
|
||||
CameraSet<SphericalCamera> cameras;
|
||||
cameras.push_back(cam1);
|
||||
|
|
|
@ -31,12 +31,9 @@
|
|||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
|
||||
#include <cmath>
|
||||
#include <random>
|
||||
|
||||
using namespace boost::assign;
|
||||
using namespace std::placeholders;
|
||||
using namespace gtsam;
|
||||
using namespace std;
|
||||
|
@ -51,9 +48,8 @@ Point3 point3_(const Unit3& p) {
|
|||
}
|
||||
|
||||
TEST(Unit3, point3) {
|
||||
vector<Point3> ps;
|
||||
ps += Point3(1, 0, 0), Point3(0, 1, 0), Point3(0, 0, 1), Point3(1, 1, 0)
|
||||
/ sqrt(2.0);
|
||||
const vector<Point3> ps{Point3(1, 0, 0), Point3(0, 1, 0), Point3(0, 0, 1),
|
||||
Point3(1, 1, 0) / sqrt(2.0)};
|
||||
Matrix actualH, expectedH;
|
||||
for(Point3 p: ps) {
|
||||
Unit3 s(p);
|
||||
|
|
|
@ -36,12 +36,12 @@ const DiscreteKey mode{M(0), 2};
|
|||
* num_measurements is the number of measurements of the continuous variable x0.
|
||||
* If manyModes is true, then we introduce one mode per measurement.
|
||||
*/
|
||||
inline HybridBayesNet createHybridBayesNet(int num_measurements = 1,
|
||||
inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
|
||||
bool manyModes = false) {
|
||||
HybridBayesNet bayesNet;
|
||||
|
||||
// Create Gaussian mixture z_i = x0 + noise for each measurement.
|
||||
for (int i = 0; i < num_measurements; i++) {
|
||||
for (size_t i = 0; i < num_measurements; i++) {
|
||||
const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode;
|
||||
bayesNet.emplace_back(
|
||||
new GaussianMixture({Z(i)}, {X(0)}, {mode_i},
|
||||
|
@ -57,7 +57,7 @@ inline HybridBayesNet createHybridBayesNet(int num_measurements = 1,
|
|||
|
||||
// Add prior on mode.
|
||||
const size_t nrModes = manyModes ? num_measurements : 1;
|
||||
for (int i = 0; i < nrModes; i++) {
|
||||
for (size_t i = 0; i < nrModes; i++) {
|
||||
bayesNet.emplace_back(new DiscreteConditional({M(i), 2}, "4/6"));
|
||||
}
|
||||
return bayesNet;
|
||||
|
@ -70,7 +70,7 @@ inline HybridBayesNet createHybridBayesNet(int num_measurements = 1,
|
|||
* the generative Bayes net model HybridBayesNet::Example(num_measurements)
|
||||
*/
|
||||
inline HybridGaussianFactorGraph createHybridGaussianFactorGraph(
|
||||
int num_measurements = 1,
|
||||
size_t num_measurements = 1,
|
||||
boost::optional<VectorValues> measurements = boost::none,
|
||||
bool manyModes = false) {
|
||||
auto bayesNet = createHybridBayesNet(num_measurements, manyModes);
|
||||
|
|
|
@ -39,7 +39,7 @@ class BayesNet : public FactorGraph<CONDITIONAL> {
sharedConditional; ///< A shared pointer to a conditional

protected:
/// @name Standard Constructors
/// @name Protected Constructors
/// @{

/** Default constructor as an empty BayesNet */

@ -50,6 +50,12 @@ class BayesNet : public FactorGraph<CONDITIONAL> {
BayesNet(ITERATOR firstConditional, ITERATOR lastConditional)
    : Base(firstConditional, lastConditional) {}

/**
 * Constructor that takes an initializer list of shared pointers.
 * BayesNet<SymbolicConditional> bn = {make_shared<SymbolicConditional>(), ...};
 */
BayesNet(std::initializer_list<sharedConditional> conditionals): Base(conditionals) {}

/// @}

public:
@ -26,11 +26,8 @@
|
|||
#include <gtsam/base/timing.h>
|
||||
|
||||
#include <boost/optional.hpp>
|
||||
#include <boost/assign/list_of.hpp>
|
||||
#include <fstream>
|
||||
|
||||
using boost::assign::cref_list_of;
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
@ -281,8 +278,8 @@ namespace gtsam {
|
|||
FactorGraphType cliqueMarginal = clique->marginal2(function);
|
||||
|
||||
// Now, marginalize out everything that is not variable j
|
||||
BayesNetType marginalBN = *cliqueMarginal.marginalMultifrontalBayesNet(
|
||||
Ordering(cref_list_of<1,Key>(j)), function);
|
||||
BayesNetType marginalBN =
|
||||
*cliqueMarginal.marginalMultifrontalBayesNet(Ordering{j}, function);
|
||||
|
||||
// The Bayes net should contain only one conditional for variable j, so return it
|
||||
return marginalBN.front();
|
||||
|
@ -403,7 +400,7 @@ namespace gtsam {
|
|||
}
|
||||
|
||||
// now, marginalize out everything that is not variable j1 or j2
|
||||
return p_BC1C2.marginalMultifrontalBayesNet(Ordering(cref_list_of<2,Key>(j1)(j2)), function);
|
||||
return p_BC1C2.marginalMultifrontalBayesNet(Ordering{j1, j2}, function);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
|
@ -17,6 +17,7 @@
|
|||
#pragma once
|
||||
|
||||
#include <gtsam/inference/BayesTreeCliqueBase.h>
|
||||
#include <gtsam/inference/FactorGraph-inst.h>
|
||||
#include <gtsam/base/timing.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
|
|
@ -154,10 +154,22 @@ class FactorGraph {
/// @}

public:
/// @name Constructors
/// @{

/// Default destructor
// Public and virtual so boost serialization can call it.
/// Public and virtual so boost serialization can call it.
virtual ~FactorGraph() = default;

/**
 * Constructor that takes an initializer list of shared pointers.
 * FactorGraph fg = {make_shared<MyFactor>(), ...};
 */
template <class DERIVEDFACTOR, typename = IsDerived<DERIVEDFACTOR>>
FactorGraph(std::initializer_list<boost::shared_ptr<DERIVEDFACTOR>> sharedFactors)
    : factors_(sharedFactors) {}

/// @}
/// @name Adding Single Factors
/// @{
@ -50,18 +50,14 @@ public:
Ordering() {
}

using KeyVector::KeyVector; // Inherit the KeyVector's constructors

/// Create from a container
template<typename KEYS>
explicit Ordering(const KEYS& keys) :
    Base(keys.begin(), keys.end()) {
}

/// Create an ordering using iterators over keys
template<typename ITERATOR>
Ordering(ITERATOR firstKey, ITERATOR lastKey) :
    Base(firstKey, lastKey) {
}

/// Add new variables to the ordering as ordering += key1, key2, ... Equivalent to calling
/// push_back.
boost::assign::list_inserter<boost::assign_detail::call_push_back<This> > operator+=(

@ -195,7 +191,7 @@ public:
KeySet src = fg.keys();
KeyVector keys(src.begin(), src.end());
std::stable_sort(keys.begin(), keys.end());
return Ordering(keys);
return Ordering(keys.begin(), keys.end());
}

/// METIS Formatting function
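The inherited KeyVector constructors above are what make the brace-init style used throughout the updated tests possible. A minimal sketch, assuming a small symbolic chain graph purely for illustration (it is not part of the patch):

// Illustrative sketch: an Ordering now brace-initializes directly, and the
// constrained-COLAMD helpers accept initializer lists for the constrained keys,
// as the updated testOrdering.cpp does.
#include <gtsam/inference/Ordering.h>
#include <gtsam/symbolic/SymbolicFactorGraph.h>

void orderingSketch() {
  using namespace gtsam;

  // Replaces the old boost::assign style: ordering += 0, 1, 2, ...
  const Ordering ordering{0, 1, 2, 3, 4, 5};

  // Small chain graph, assumed here only to have something to order.
  SymbolicFactorGraph graph;
  graph.push_factor(0, 1);
  graph.push_factor(1, 2);
  graph.push_factor(2, 3);
  graph.push_factor(3, 4);

  // Constraint sets are now plain initializer lists instead of list_of(2)(4).
  const Ordering constrained = Ordering::ColamdConstrainedLast(graph, {2, 4});
}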
@ -21,11 +21,8 @@
|
|||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
#include <boost/assign/std/list.hpp> // for operator +=
|
||||
|
||||
#include <sstream>
|
||||
|
||||
using namespace boost::assign;
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
|
||||
|
|
|
@ -19,9 +19,7 @@
|
|||
#include <gtsam/base/TestableAssertions.h>
|
||||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
#include <boost/assign/std/list.hpp> // for operator +=
|
||||
|
||||
using namespace boost::assign;
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
|
||||
|
|
|
@ -22,11 +22,8 @@
|
|||
#include <gtsam/base/TestableAssertions.h>
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
#include <boost/assign/std.hpp>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
using namespace boost::assign;
|
||||
|
||||
namespace example {
|
||||
SymbolicFactorGraph symbolicChain() {
|
||||
|
@ -47,33 +44,33 @@ TEST(Ordering, constrained_ordering) {
|
|||
// unconstrained version
|
||||
{
|
||||
Ordering actual = Ordering::Colamd(symbolicGraph);
|
||||
Ordering expected = Ordering(list_of(0)(1)(2)(3)(4)(5));
|
||||
Ordering expected{0, 1, 2, 3, 4, 5};
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
|
||||
// constrained version - push one set to the end
|
||||
{
|
||||
Ordering actual = Ordering::ColamdConstrainedLast(symbolicGraph, list_of(2)(4));
|
||||
Ordering expected = Ordering(list_of(0)(1)(5)(3)(4)(2));
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
Ordering actual = Ordering::ColamdConstrainedLast(symbolicGraph, {2, 4});
|
||||
Ordering expected = Ordering({0, 1, 5, 3, 4, 2});
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
|
||||
// constrained version - push one set to the start
|
||||
{
|
||||
Ordering actual = Ordering::ColamdConstrainedFirst(symbolicGraph, list_of(2)(4));
|
||||
Ordering expected = Ordering(list_of(2)(4)(0)(1)(3)(5));
|
||||
Ordering actual = Ordering::ColamdConstrainedFirst(symbolicGraph, {2, 4});
|
||||
Ordering expected = Ordering({2, 4, 0, 1, 3, 5});
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
|
||||
// Make sure giving empty constraints does not break the code
|
||||
{
|
||||
Ordering actual = Ordering::ColamdConstrainedLast(symbolicGraph, {});
|
||||
Ordering expected = Ordering(list_of(0)(1)(2)(3)(4)(5));
|
||||
Ordering expected = Ordering({0, 1, 2, 3, 4, 5});
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
{
|
||||
Ordering actual = Ordering::ColamdConstrainedFirst(symbolicGraph, {});
|
||||
Ordering expected = Ordering(list_of(0)(1)(2)(3)(4)(5));
|
||||
Ordering expected = Ordering({0, 1, 2, 3, 4, 5});
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
|
||||
|
@ -81,11 +78,11 @@ TEST(Ordering, constrained_ordering) {
|
|||
SymbolicFactorGraph emptyGraph;
|
||||
Ordering empty;
|
||||
{
|
||||
Ordering actual = Ordering::ColamdConstrainedLast(emptyGraph, list_of(2)(4));
|
||||
Ordering actual = Ordering::ColamdConstrainedLast(emptyGraph, {2, 4});
|
||||
EXPECT(assert_equal(empty, actual));
|
||||
}
|
||||
{
|
||||
Ordering actual = Ordering::ColamdConstrainedFirst(emptyGraph, list_of(2)(4));
|
||||
Ordering actual = Ordering::ColamdConstrainedFirst(emptyGraph, {2, 4});
|
||||
EXPECT(assert_equal(empty, actual));
|
||||
}
|
||||
}
|
||||
|
@ -105,7 +102,7 @@ TEST(Ordering, grouped_constrained_ordering) {
|
|||
constraints[5] = 2;
|
||||
|
||||
Ordering actual = Ordering::ColamdConstrained(symbolicGraph, constraints);
|
||||
Ordering expected = list_of(0)(1)(3)(2)(4)(5);
|
||||
Ordering expected{0, 1, 3, 2, 4, 5};
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
|
||||
|
@ -139,9 +136,11 @@ TEST(Ordering, csr_format) {
|
|||
|
||||
MetisIndex mi(symbolicGraph);
|
||||
|
||||
vector<int> xadjExpected, adjExpected;
|
||||
xadjExpected += 0, 2, 5, 8, 11, 13, 16, 20, 24, 28, 31, 33, 36, 39, 42, 44;
|
||||
adjExpected += 1, 5, 0, 2, 6, 1, 3, 7, 2, 4, 8, 3, 9, 0, 6, 10, 1, 5, 7, 11, 2, 6, 8, 12, 3, 7, 9, 13, 4, 8, 14, 5, 11, 6, 10, 12, 7, 11, 13, 8, 12, 14, 9, 13;
|
||||
const vector<int> xadjExpected{0, 2, 5, 8, 11, 13, 16, 20,
|
||||
24, 28, 31, 33, 36, 39, 42, 44},
|
||||
adjExpected{1, 5, 0, 2, 6, 1, 3, 7, 2, 4, 8, 3, 9, 0, 6,
|
||||
10, 1, 5, 7, 11, 2, 6, 8, 12, 3, 7, 9, 13, 4, 8,
|
||||
14, 5, 11, 6, 10, 12, 7, 11, 13, 8, 12, 14, 9, 13};
|
||||
|
||||
EXPECT(xadjExpected == mi.xadj());
|
||||
EXPECT(adjExpected.size() == mi.adj().size());
|
||||
|
@ -161,9 +160,8 @@ TEST(Ordering, csr_format_2) {
|
|||
|
||||
MetisIndex mi(symbolicGraph);
|
||||
|
||||
vector<int> xadjExpected, adjExpected;
|
||||
xadjExpected += 0, 1, 4, 6, 8, 10;
|
||||
adjExpected += 1, 0, 2, 4, 1, 3, 2, 4, 1, 3;
|
||||
const std::vector<int> xadjExpected{0, 1, 4, 6, 8, 10},
|
||||
adjExpected{1, 0, 2, 4, 1, 3, 2, 4, 1, 3};
|
||||
|
||||
EXPECT(xadjExpected == mi.xadj());
|
||||
EXPECT(adjExpected.size() == mi.adj().size());
|
||||
|
@ -183,9 +181,8 @@ TEST(Ordering, csr_format_3) {
|
|||
|
||||
MetisIndex mi(symbolicGraph);
|
||||
|
||||
vector<int> xadjExpected, adjExpected;
|
||||
xadjExpected += 0, 1, 4, 6, 8, 10;
|
||||
adjExpected += 1, 0, 2, 4, 1, 3, 2, 4, 1, 3;
|
||||
const std::vector<int> xadjExpected{0, 1, 4, 6, 8, 10},
|
||||
adjExpected{1, 0, 2, 4, 1, 3, 2, 4, 1, 3};
|
||||
//size_t minKey = mi.minKey();
|
||||
|
||||
vector<int> adjAcutal = mi.adj();
|
||||
|
@ -202,24 +199,18 @@ TEST(Ordering, csr_format_3) {
|
|||
/* ************************************************************************* */
|
||||
TEST(Ordering, AppendVector) {
|
||||
using symbol_shorthand::X;
|
||||
KeyVector keys{X(0), X(1), X(2)};
|
||||
Ordering actual;
|
||||
KeyVector keys = {X(0), X(1), X(2)};
|
||||
actual += keys;
|
||||
|
||||
Ordering expected;
|
||||
expected += X(0);
|
||||
expected += X(1);
|
||||
expected += X(2);
|
||||
Ordering expected{X(0), X(1), X(2)};
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(Ordering, Contains) {
|
||||
using symbol_shorthand::X;
|
||||
Ordering ordering;
|
||||
ordering += X(0);
|
||||
ordering += X(1);
|
||||
ordering += X(2);
|
||||
Ordering ordering{X(0), X(1), X(2)};
|
||||
|
||||
EXPECT(ordering.contains(X(1)));
|
||||
EXPECT(!ordering.contains(X(4)));
|
||||
|
@ -239,9 +230,8 @@ TEST(Ordering, csr_format_4) {
|
|||
|
||||
MetisIndex mi(symbolicGraph);
|
||||
|
||||
vector<int> xadjExpected, adjExpected;
|
||||
xadjExpected += 0, 1, 3, 5, 7, 9, 10;
|
||||
adjExpected += 1, 0, 2, 1, 3, 2, 4, 3, 5, 4;
|
||||
const vector<int> xadjExpected{0, 1, 3, 5, 7, 9, 10},
|
||||
adjExpected{1, 0, 2, 1, 3, 2, 4, 3, 5, 4};
|
||||
|
||||
vector<int> adjAcutal = mi.adj();
|
||||
vector<int> xadjActual = mi.xadj();
|
||||
|
@ -274,9 +264,7 @@ TEST(Ordering, metis) {
|
|||
|
||||
MetisIndex mi(symbolicGraph);
|
||||
|
||||
vector<int> xadjExpected, adjExpected;
|
||||
xadjExpected += 0, 1, 3, 4;
|
||||
adjExpected += 1, 0, 2, 1;
|
||||
const vector<int> xadjExpected{0, 1, 3, 4}, adjExpected{1, 0, 2, 1};
|
||||
|
||||
EXPECT(xadjExpected == mi.xadj());
|
||||
EXPECT(adjExpected.size() == mi.adj().size());
|
||||
|
@ -303,7 +291,7 @@ TEST(Ordering, MetisLoop) {
|
|||
// | - P( 4 | 0 3)
|
||||
// | | - P( 5 | 0 4)
|
||||
// | - P( 2 | 1 3)
|
||||
Ordering expected = Ordering(list_of(5)(4)(2)(1)(0)(3));
|
||||
Ordering expected = Ordering({5, 4, 2, 1, 0, 3});
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
#elif defined(_WIN32)
|
||||
|
@ -313,7 +301,7 @@ TEST(Ordering, MetisLoop) {
|
|||
// | - P( 3 | 5 2)
|
||||
// | | - P( 4 | 5 3)
|
||||
// | - P( 1 | 0 2)
|
||||
Ordering expected = Ordering(list_of(4)(3)(1)(0)(5)(2));
|
||||
Ordering expected = Ordering({4, 3, 1, 0, 5, 2});
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
#else
|
||||
|
@ -323,7 +311,7 @@ TEST(Ordering, MetisLoop) {
|
|||
// | - P( 2 | 4 1)
|
||||
// | | - P( 3 | 4 2)
|
||||
// | - P( 5 | 0 1)
|
||||
Ordering expected = Ordering(list_of(3)(2)(5)(0)(4)(1));
|
||||
Ordering expected = Ordering({3, 2, 5, 0, 4, 1});
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
#endif
|
||||
|
@ -347,7 +335,7 @@ TEST(Ordering, MetisSingleNode) {
|
|||
symbolicGraph.push_factor(7);
|
||||
|
||||
Ordering actual = Ordering::Create(Ordering::METIS, symbolicGraph);
|
||||
Ordering expected = Ordering(list_of(7));
|
||||
Ordering expected = Ordering({7});
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
#endif
|
||||
|
@ -365,7 +353,7 @@ TEST(Ordering, Create) {
|
|||
//| | | - P( 1 | 2)
|
||||
//| | | | - P( 0 | 1)
|
||||
Ordering actual = Ordering::Create(Ordering::COLAMD, symbolicGraph);
|
||||
Ordering expected = Ordering(list_of(0)(1)(2)(3)(4)(5));
|
||||
Ordering expected = Ordering({0, 1, 2, 3, 4, 5});
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
|
||||
|
@ -376,7 +364,7 @@ TEST(Ordering, Create) {
|
|||
//- P( 1 0 2)
|
||||
//| - P( 3 4 | 2)
|
||||
//| | - P( 5 | 4)
|
||||
Ordering expected = Ordering(list_of(5)(3)(4)(1)(0)(2));
|
||||
Ordering expected = Ordering({5, 3, 4, 1, 0, 2});
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
#endif
|
||||
|
|
|
@ -22,11 +22,8 @@
|
|||
#include <gtsam/inference/VariableSlots.h>
|
||||
#include <gtsam/symbolic/SymbolicFactorGraph.h>
|
||||
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
|
||||
using namespace gtsam;
|
||||
using namespace std;
|
||||
using namespace boost::assign;
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(VariableSlots, constructor) {
|
||||
|
@ -41,12 +38,12 @@ TEST(VariableSlots, constructor) {
|
|||
|
||||
static const size_t none = numeric_limits<size_t>::max();
|
||||
VariableSlots expected((SymbolicFactorGraph()));
|
||||
expected[0] += none, 0, 0, none;
|
||||
expected[1] += none, 1, none, none;
|
||||
expected[2] += 0, none, 1, none;
|
||||
expected[3] += 1, none, none, none;
|
||||
expected[5] += none, none, none, 0;
|
||||
expected[9] += none, none, none, 1;
|
||||
expected[0] = {none, 0, 0, none};
|
||||
expected[1] = {none, 1, none, none};
|
||||
expected[2] = {0, none, 1, none};
|
||||
expected[3] = {1, none, none, none};
|
||||
expected[5] = {none, none, none, 0};
|
||||
expected[9] = {none, none, none, 1};
|
||||
|
||||
CHECK(assert_equal(expected, actual));
|
||||
}
|
||||
|
|
|
@ -26,19 +26,18 @@ using namespace std;
|
|||
namespace gtsam {
|
||||
|
||||
/* ************************************************************************* */
|
||||
Errors::Errors(){}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Errors::Errors(const VectorValues& V) {
|
||||
for(const Vector& e: V | boost::adaptors::map_values) {
|
||||
push_back(e);
|
||||
Errors createErrors(const VectorValues& V) {
|
||||
Errors result;
|
||||
for (const Vector& e : V | boost::adaptors::map_values) {
|
||||
result.push_back(e);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void Errors::print(const std::string& s) const {
|
||||
void print(const Errors& e, const string& s) {
|
||||
cout << s << endl;
|
||||
for(const Vector& v: *this)
|
||||
for(const Vector& v: e)
|
||||
gtsam::print(v);
|
||||
}
|
||||
|
||||
|
@ -51,50 +50,49 @@ struct equalsVector : public std::function<bool(const Vector&, const Vector&)> {
|
|||
}
|
||||
};
|
||||
|
||||
bool Errors::equals(const Errors& expected, double tol) const {
|
||||
if( size() != expected.size() ) return false;
|
||||
return equal(begin(),end(),expected.begin(),equalsVector(tol));
|
||||
bool equality(const Errors& actual, const Errors& expected, double tol) {
|
||||
if (actual.size() != expected.size()) return false;
|
||||
return equal(actual.begin(), actual.end(), expected.begin(),
|
||||
equalsVector(tol));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Errors Errors::operator+(const Errors& b) const {
|
||||
Errors operator+(const Errors& a, const Errors& b) {
|
||||
#ifndef NDEBUG
|
||||
size_t m = size();
|
||||
size_t m = a.size();
|
||||
if (b.size()!=m)
|
||||
throw(std::invalid_argument("Errors::operator+: incompatible sizes"));
|
||||
#endif
|
||||
Errors result;
|
||||
Errors::const_iterator it = b.begin();
|
||||
for(const Vector& ai: *this)
|
||||
for(const Vector& ai: a)
|
||||
result.push_back(ai + *(it++));
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
/* ************************************************************************* */
|
||||
Errors Errors::operator-(const Errors& b) const {
|
||||
Errors operator-(const Errors& a, const Errors& b) {
|
||||
#ifndef NDEBUG
|
||||
size_t m = size();
|
||||
size_t m = a.size();
|
||||
if (b.size()!=m)
|
||||
throw(std::invalid_argument("Errors::operator-: incompatible sizes"));
|
||||
#endif
|
||||
Errors result;
|
||||
Errors::const_iterator it = b.begin();
|
||||
for(const Vector& ai: *this)
|
||||
for(const Vector& ai: a)
|
||||
result.push_back(ai - *(it++));
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Errors Errors::operator-() const {
|
||||
Errors operator-(const Errors& a) {
|
||||
Errors result;
|
||||
for(const Vector& ai: *this)
|
||||
for(const Vector& ai: a)
|
||||
result.push_back(-ai);
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
|
||||
/* ************************************************************************* */
|
||||
double dot(const Errors& a, const Errors& b) {
|
||||
#ifndef NDEBUG
|
||||
|
@ -105,7 +103,7 @@ double dot(const Errors& a, const Errors& b) {
|
|||
double result = 0.0;
|
||||
Errors::const_iterator it = b.begin();
|
||||
for(const Vector& ai: a)
|
||||
result += gtsam::dot(ai, *(it++));
|
||||
result += gtsam::dot<Vector,Vector>(ai, *(it++));
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -116,11 +114,6 @@ void axpy(double alpha, const Errors& x, Errors& y) {
|
|||
yi += alpha * (*(it++));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void print(const Errors& a, const string& s) {
|
||||
a.print(s);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
||||
} // gtsam
|
||||
|
|
|
@ -20,59 +20,54 @@
#pragma once

#include <gtsam/base/FastList.h>
#include <gtsam/base/Vector.h>
#include <gtsam/base/Testable.h>
#include <gtsam/base/Vector.h>

#include <string>

namespace gtsam {

// Forward declarations
class VectorValues;
// Forward declarations
class VectorValues;

/** vector of errors */
class Errors : public FastList<Vector> {
/// Errors is a vector of errors.
using Errors = FastList<Vector>;

public:
/// Break V into pieces according to its start indices.
GTSAM_EXPORT Errors createErrors(const VectorValues& V);

GTSAM_EXPORT Errors() ;
/// Print an Errors instance.
GTSAM_EXPORT void print(const Errors& e, const std::string& s = "Errors");

/** break V into pieces according to its start indices */
GTSAM_EXPORT Errors(const VectorValues&V);
// Check equality for unit testing.
GTSAM_EXPORT bool equality(const Errors& actual, const Errors& expected,
    double tol = 1e-9);

/** print */
GTSAM_EXPORT void print(const std::string& s = "Errors") const;
/// Addition.
GTSAM_EXPORT Errors operator+(const Errors& a, const Errors& b);

/** equals, for unit testing */
GTSAM_EXPORT bool equals(const Errors& expected, double tol=1e-9) const;
/// Subtraction.
GTSAM_EXPORT Errors operator-(const Errors& a, const Errors& b);

/** Addition */
GTSAM_EXPORT Errors operator+(const Errors& b) const;
/// Negation.
GTSAM_EXPORT Errors operator-(const Errors& a);

/** subtraction */
GTSAM_EXPORT Errors operator-(const Errors& b) const;
/// Dot product.
GTSAM_EXPORT double dot(const Errors& a, const Errors& b);

/** negation */
GTSAM_EXPORT Errors operator-() const ;
/// BLAS level 2 style AXPY, `y := alpha*x + y`
GTSAM_EXPORT void axpy(double alpha, const Errors& x, Errors& y);

}; // Errors
/// traits
template <>
struct traits<Errors> {
  static void Print(const Errors& e, const std::string& str = "") {
    print(e, str);
  }
  static bool Equals(const Errors& actual, const Errors& expected,
                     double tol = 1e-8) {
    return equality(actual, expected, tol);
  }
};

/**
 * dot product
 */
GTSAM_EXPORT double dot(const Errors& a, const Errors& b);

/**
 * BLAS level 2 style
 */
GTSAM_EXPORT void axpy(double alpha, const Errors& x, Errors& y);

/** print with optional string */
GTSAM_EXPORT void print(const Errors& a, const std::string& s = "Error");

/// traits
template<>
struct traits<Errors> : public Testable<Errors> {
};

} //\ namespace gtsam
} // namespace gtsam
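With Errors reduced to a FastList<Vector> plus the free functions declared above, call sites change shape as in the updated testErrors.cpp. A minimal sketch of the new API (values are illustrative):

// Sketch of the free-function API that replaces the old Errors member functions.
#include <gtsam/base/Vector.h>
#include <gtsam/linear/Errors.h>

void errorsSketch() {
  using namespace gtsam;

  // Errors is now just a FastList<Vector>, so it brace-initializes directly.
  Errors e = {Vector2(1.0, 2.0), Vector3(3.0, 4.0, 5.0)};

  const double d = dot(e, e);  // 1 + 4 + 9 + 16 + 25 = 55
  axpy(2.0, e, e);             // e := 2*e + e, i.e. {(3, 6), (9, 12, 15)}

  const Errors expected = {Vector2(3.0, 6.0), Vector3(9.0, 12.0, 15.0)};
  const bool ok = equality(e, expected);  // free function replaces Errors::equals
  print(e, "e");                          // free function replaces Errors::print
  (void)d; (void)ok;
}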
@ -65,8 +65,17 @@ namespace gtsam {
explicit GaussianBayesNet(const FactorGraph<DERIVEDCONDITIONAL>& graph)
    : Base(graph) {}

/**
 * Constructor that takes an initializer list of shared pointers.
 * BayesNet bn = {make_shared<Conditional>(), ...};
 */
template <class DERIVEDCONDITIONAL>
GaussianBayesNet(
    std::initializer_list<boost::shared_ptr<DERIVEDCONDITIONAL> > conditionals)
    : Base(conditionals) {}

/// Destructor
virtual ~GaussianBayesNet() {}
virtual ~GaussianBayesNet() = default;

/// @}
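A minimal sketch of the new initializer-list construction, mirroring the smallBayesNet fixture in the updated testGaussianBayesNet.cpp (the keys and values are illustrative):

// Sketch: build a GaussianBayesNet from shared conditionals in one expression,
// instead of boost::assign::list_of.
#include <boost/make_shared.hpp>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianConditional.h>

void gaussianBayesNetSketch() {
  using namespace gtsam;
  const Key x = 11, y = 22;

  // p(x | y) and p(y), both with unit information matrices.
  GaussianBayesNet bn = {
      boost::make_shared<GaussianConditional>(x, Vector1::Constant(9), I_1x1, y, I_1x1),
      boost::make_shared<GaussianConditional>(y, Vector1::Constant(5), I_1x1)};

  const VectorValues solution = bn.optimize();  // x = 4, y = 5 for this small net
}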
@ -80,9 +80,20 @@ namespace gtsam {
typedef EliminateableFactorGraph<This> BaseEliminateable; ///< Typedef to base elimination class
typedef boost::shared_ptr<This> shared_ptr; ///< shared_ptr to this class

/// @name Constructors
/// @{

/** Default constructor */
GaussianFactorGraph() {}

/**
 * Construct from an initializer lists of GaussianFactor shared pointers.
 * Example:
 *   GaussianFactorGraph graph = { factor1, factor2, factor3 };
 */
GaussianFactorGraph(std::initializer_list<sharedFactor> factors) : Base(factors) {}

/** Construct from iterator over factors */
template<typename ITERATOR>
GaussianFactorGraph(ITERATOR firstFactor, ITERATOR lastFactor) : Base(firstFactor, lastFactor) {}

@ -98,6 +109,7 @@ namespace gtsam {
/** Virtual destructor */
virtual ~GaussianFactorGraph() {}

/// @}
/// @name Testable
/// @{
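A minimal sketch of the initializer-list constructor documented above, following the chain fixture in the updated testGaussianBayesTree.cpp (keys and noise model are illustrative):

// Sketch: a GaussianFactorGraph built from shared Jacobian factors in one shot.
#include <boost/make_shared.hpp>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/NoiseModel.h>

void gaussianFactorGraphSketch() {
  using namespace gtsam;
  const Key x1 = 1, x2 = 2;
  const SharedDiagonal noise = noiseModel::Isotropic::Sigma(1, 0.5);

  // One binary and one unary Jacobian factor, passed as shared pointers.
  const GaussianFactorGraph graph = {
      boost::make_shared<JacobianFactor>(x2, I_1x1, x1, I_1x1,
                                         (Vector(1) << 1.).finished(), noise),
      boost::make_shared<JacobianFactor>(x1, I_1x1,
                                         (Vector(1) << 1.).finished(), noise)};
}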
@ -32,7 +32,6 @@
|
|||
#include <boost/format.hpp>
|
||||
#include <boost/make_shared.hpp>
|
||||
#include <boost/tuple/tuple.hpp>
|
||||
#include <boost/assign/list_of.hpp>
|
||||
#include <boost/range/adaptor/transformed.hpp>
|
||||
#include <boost/range/adaptor/map.hpp>
|
||||
#include <boost/range/algorithm/copy.hpp>
|
||||
|
@ -41,7 +40,6 @@
|
|||
#include <limits>
|
||||
|
||||
using namespace std;
|
||||
using namespace boost::assign;
|
||||
namespace br {
|
||||
using namespace boost::range;
|
||||
using namespace boost::adaptors;
|
||||
|
@ -49,6 +47,9 @@ using namespace boost::adaptors;
|
|||
|
||||
namespace gtsam {
|
||||
|
||||
// Typedefs used in constructors below.
|
||||
using Dims = std::vector<Eigen::Index>;
|
||||
|
||||
/* ************************************************************************* */
|
||||
void HessianFactor::Allocate(const Scatter& scatter) {
|
||||
gttic(HessianFactor_Allocate);
|
||||
|
@ -74,14 +75,14 @@ HessianFactor::HessianFactor(const Scatter& scatter) {
|
|||
|
||||
/* ************************************************************************* */
|
||||
HessianFactor::HessianFactor() :
|
||||
info_(cref_list_of<1>(1)) {
|
||||
info_(Dims{1}) {
|
||||
assert(info_.rows() == 1);
|
||||
constantTerm() = 0.0;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
HessianFactor::HessianFactor(Key j, const Matrix& G, const Vector& g, double f) :
|
||||
GaussianFactor(cref_list_of<1>(j)), info_(cref_list_of<2>(G.cols())(1)) {
|
||||
HessianFactor::HessianFactor(Key j, const Matrix& G, const Vector& g, double f)
|
||||
: GaussianFactor(KeyVector{j}), info_(Dims{G.cols(), 1}) {
|
||||
if (G.rows() != G.cols() || G.rows() != g.size())
|
||||
throw invalid_argument(
|
||||
"Attempting to construct HessianFactor with inconsistent matrix and/or vector dimensions");
|
||||
|
@ -93,8 +94,8 @@ HessianFactor::HessianFactor(Key j, const Matrix& G, const Vector& g, double f)
|
|||
/* ************************************************************************* */
|
||||
// error is 0.5*(x-mu)'*inv(Sigma)*(x-mu) = 0.5*(x'*G*x - 2*x'*G*mu + mu'*G*mu)
|
||||
// where G = inv(Sigma), g = G*mu, f = mu'*G*mu = mu'*g
|
||||
HessianFactor::HessianFactor(Key j, const Vector& mu, const Matrix& Sigma) :
|
||||
GaussianFactor(cref_list_of<1>(j)), info_(cref_list_of<2>(Sigma.cols())(1)) {
|
||||
HessianFactor::HessianFactor(Key j, const Vector& mu, const Matrix& Sigma)
|
||||
: GaussianFactor(KeyVector{j}), info_(Dims{Sigma.cols(), 1}) {
|
||||
if (Sigma.rows() != Sigma.cols() || Sigma.rows() != mu.size())
|
||||
throw invalid_argument(
|
||||
"Attempting to construct HessianFactor with inconsistent matrix and/or vector dimensions");
|
||||
|
@ -107,8 +108,8 @@ HessianFactor::HessianFactor(Key j, const Vector& mu, const Matrix& Sigma) :
|
|||
HessianFactor::HessianFactor(Key j1, Key j2, const Matrix& G11,
|
||||
const Matrix& G12, const Vector& g1, const Matrix& G22, const Vector& g2,
|
||||
double f) :
|
||||
GaussianFactor(cref_list_of<2>(j1)(j2)), info_(
|
||||
cref_list_of<3>(G11.cols())(G22.cols())(1)) {
|
||||
GaussianFactor(KeyVector{j1,j2}), info_(
|
||||
Dims{G11.cols(),G22.cols(),1}) {
|
||||
info_.setDiagonalBlock(0, G11);
|
||||
info_.setOffDiagonalBlock(0, 1, G12);
|
||||
info_.setDiagonalBlock(1, G22);
|
||||
|
@ -121,8 +122,8 @@ HessianFactor::HessianFactor(Key j1, Key j2, Key j3, const Matrix& G11,
|
|||
const Matrix& G12, const Matrix& G13, const Vector& g1, const Matrix& G22,
|
||||
const Matrix& G23, const Vector& g2, const Matrix& G33, const Vector& g3,
|
||||
double f) :
|
||||
GaussianFactor(cref_list_of<3>(j1)(j2)(j3)), info_(
|
||||
cref_list_of<4>(G11.cols())(G22.cols())(G33.cols())(1)) {
|
||||
GaussianFactor(KeyVector{j1,j2,j3}), info_(
|
||||
Dims{G11.cols(),G22.cols(),G33.cols(),1}) {
|
||||
if (G11.rows() != G11.cols() || G11.rows() != G12.rows()
|
||||
|| G11.rows() != G13.rows() || G11.rows() != g1.size()
|
||||
|| G22.cols() != G12.cols() || G33.cols() != G13.cols()
|
||||
|
|
|
@ -31,7 +31,6 @@
|
|||
#include <gtsam/base/FastMap.h>
|
||||
#include <gtsam/base/cholesky.h>
|
||||
|
||||
#include <boost/assign/list_of.hpp>
|
||||
#include <boost/format.hpp>
|
||||
#include <boost/make_shared.hpp>
|
||||
#include <boost/array.hpp>
|
||||
|
@ -44,13 +43,16 @@
|
|||
#include <stdexcept>
|
||||
|
||||
using namespace std;
|
||||
using namespace boost::assign;
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
// Typedefs used in constructors below.
|
||||
using Dims = std::vector<Eigen::Index>;
|
||||
using Pairs = std::vector<std::pair<Eigen::Index, Matrix>>;
|
||||
|
||||
/* ************************************************************************* */
|
||||
JacobianFactor::JacobianFactor() :
|
||||
Ab_(cref_list_of<1>(1), 0) {
|
||||
Ab_(Dims{1}, 0) {
|
||||
getb().setZero();
|
||||
}
|
||||
|
||||
|
@ -68,29 +70,27 @@ JacobianFactor::JacobianFactor(const GaussianFactor& gf) {
|
|||
|
||||
/* ************************************************************************* */
|
||||
JacobianFactor::JacobianFactor(const Vector& b_in) :
|
||||
Ab_(cref_list_of<1>(1), b_in.size()) {
|
||||
Ab_(Dims{1}, b_in.size()) {
|
||||
getb() = b_in;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
JacobianFactor::JacobianFactor(Key i1, const Matrix& A1, const Vector& b,
|
||||
const SharedDiagonal& model) {
|
||||
fillTerms(cref_list_of<1>(make_pair(i1, A1)), b, model);
|
||||
fillTerms(Pairs{{i1, A1}}, b, model);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
JacobianFactor::JacobianFactor(const Key i1, const Matrix& A1, Key i2,
|
||||
const Matrix& A2, const Vector& b, const SharedDiagonal& model) {
|
||||
fillTerms(cref_list_of<2>(make_pair(i1, A1))(make_pair(i2, A2)), b, model);
|
||||
fillTerms(Pairs{{i1, A1}, {i2, A2}}, b, model);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
JacobianFactor::JacobianFactor(const Key i1, const Matrix& A1, Key i2,
|
||||
const Matrix& A2, Key i3, const Matrix& A3, const Vector& b,
|
||||
const SharedDiagonal& model) {
|
||||
fillTerms(
|
||||
cref_list_of<3>(make_pair(i1, A1))(make_pair(i2, A2))(make_pair(i3, A3)),
|
||||
b, model);
|
||||
fillTerms(Pairs{{i1, A1}, {i2, A2}, {i3, A3}}, b, model);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
|
@ -27,9 +27,7 @@
|
|||
#include <gtsam/base/Testable.h>
|
||||
|
||||
#include <boost/make_shared.hpp>
|
||||
#include <boost/assign/list_of.hpp>
|
||||
|
||||
using namespace boost::assign;
|
||||
using namespace std;
|
||||
|
||||
namespace gtsam {
|
||||
|
|
|
@ -110,7 +110,7 @@ VectorValues SubgraphPreconditioner::x(const VectorValues& y) const {
|
|||
|
||||
/* ************************************************************************* */
|
||||
double SubgraphPreconditioner::error(const VectorValues& y) const {
|
||||
Errors e(y);
|
||||
Errors e = createErrors(y);
|
||||
VectorValues x = this->x(y);
|
||||
Errors e2 = Ab2_.gaussianErrors(x);
|
||||
return 0.5 * (dot(e, e) + dot(e2,e2));
|
||||
|
@ -129,7 +129,7 @@ VectorValues SubgraphPreconditioner::gradient(const VectorValues &y) const {
|
|||
/* ************************************************************************* */
|
||||
// Apply operator A, A*y = [I;A2*inv(R1)]*y = [y; A2*inv(R1)*y]
|
||||
Errors SubgraphPreconditioner::operator*(const VectorValues &y) const {
|
||||
Errors e(y);
|
||||
Errors e = createErrors(y);
|
||||
VectorValues x = Rc1_.backSubstitute(y); /* x=inv(R1)*y */
|
||||
Errors e2 = Ab2_ * x; /* A2*x */
|
||||
e.splice(e.end(), e2);
|
||||
|
|
|
@ -92,11 +92,8 @@ namespace gtsam {
VectorValues() {}

/// Construct from initializer list.
VectorValues(std::initializer_list<std::pair<Key, Vector>> init) {
  for (const auto& p : init) {
    values_.insert(p); // Insert key-value pair into map
  }
}
VectorValues(std::initializer_list<std::pair<Key, Vector>> init)
    : values_(init.begin(), init.end()) {}

/** Merge two VectorValues into one, this is more efficient than inserting
 * elements one by one. */
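A minimal sketch of the delegating initializer-list constructor above, in the style the updated tests use in place of map_list_of (keys and vectors are illustrative):

// Sketch: each entry is a (Key, Vector) pair; the underlying map is built in one shot.
#include <gtsam/linear/VectorValues.h>

void vectorValuesSketch() {
  using namespace gtsam;
  const Key x = 11, y = 22;

  const VectorValues values{{x, Vector1::Constant(4)}, {y, Vector1::Constant(5)}};
}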
@ -625,17 +625,6 @@ virtual class GaussianBayesTree {
gtsam::GaussianBayesNet* jointBayesNet(size_t key1, size_t key2) const;
};

#include <gtsam/linear/Errors.h>
class Errors {
  //Constructors
  Errors();
  Errors(const gtsam::VectorValues& V);

  //Testable
  void print(string s = "Errors");
  bool equals(const gtsam::Errors& expected, double tol) const;
};

#include <gtsam/linear/GaussianISAM.h>
class GaussianISAM {
  //Constructor
@ -15,9 +15,6 @@
 * @author Frank Dellaert
 */

#include <boost/assign/std/list.hpp> // for +=
using namespace boost::assign;

#include <CppUnitLite/TestHarness.h>
#include <gtsam/base/Testable.h>
#include <gtsam/linear/Errors.h>

@ -26,16 +23,13 @@ using namespace std;
using namespace gtsam;

/* ************************************************************************* */
TEST( Errors, arithmetic )
{
Errors e;
e += Vector2(1.0,2.0), Vector3(3.0,4.0,5.0);
DOUBLES_EQUAL(1+4+9+16+25,dot(e,e),1e-9);
TEST(Errors, arithmetic) {
Errors e = {Vector2(1.0, 2.0), Vector3(3.0, 4.0, 5.0)};
DOUBLES_EQUAL(1 + 4 + 9 + 16 + 25, dot(e, e), 1e-9);

axpy(2.0, e, e);
Errors expected;
expected += Vector2(3.0,6.0), Vector3(9.0,12.0,15.0);
CHECK(assert_equal(expected,e));
const Errors expected = {Vector2(3.0, 6.0), Vector3(9.0, 12.0, 15.0)};
CHECK(assert_equal(expected, e));
}

/* ************************************************************************* */
@ -25,15 +25,12 @@
|
|||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
#include <boost/tuple/tuple.hpp>
|
||||
#include <boost/assign/list_of.hpp>
|
||||
#include <boost/assign/std/list.hpp> // for operator +=
|
||||
#include <boost/bind/bind.hpp>
|
||||
|
||||
// STL/C++
|
||||
#include <iostream>
|
||||
#include <sstream>
|
||||
|
||||
using namespace boost::assign;
|
||||
using namespace std::placeholders;
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
|
@ -41,15 +38,15 @@ using symbol_shorthand::X;
|
|||
|
||||
static const Key _x_ = 11, _y_ = 22, _z_ = 33;
|
||||
|
||||
static GaussianBayesNet smallBayesNet =
|
||||
list_of(GaussianConditional(_x_, Vector1::Constant(9), I_1x1, _y_, I_1x1))(
|
||||
GaussianConditional(_y_, Vector1::Constant(5), I_1x1));
|
||||
static GaussianBayesNet smallBayesNet = {
|
||||
boost::make_shared<GaussianConditional>(_x_, Vector1::Constant(9), I_1x1, _y_, I_1x1),
|
||||
boost::make_shared<GaussianConditional>(_y_, Vector1::Constant(5), I_1x1)};
|
||||
|
||||
static GaussianBayesNet noisyBayesNet =
|
||||
list_of(GaussianConditional(_x_, Vector1::Constant(9), I_1x1, _y_, I_1x1,
|
||||
noiseModel::Isotropic::Sigma(1, 2.0)))(
|
||||
GaussianConditional(_y_, Vector1::Constant(5), I_1x1,
|
||||
noiseModel::Isotropic::Sigma(1, 3.0)));
|
||||
static GaussianBayesNet noisyBayesNet = {
|
||||
boost::make_shared<GaussianConditional>(_x_, Vector1::Constant(9), I_1x1, _y_, I_1x1,
|
||||
noiseModel::Isotropic::Sigma(1, 2.0)),
|
||||
boost::make_shared<GaussianConditional>(_y_, Vector1::Constant(5), I_1x1,
|
||||
noiseModel::Isotropic::Sigma(1, 3.0))};
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST( GaussianBayesNet, Matrix )
|
||||
|
@ -115,11 +112,11 @@ TEST( GaussianBayesNet, NoisyMatrix )
|
|||
|
||||
/* ************************************************************************* */
|
||||
TEST(GaussianBayesNet, Optimize) {
|
||||
VectorValues expected =
|
||||
map_list_of<Key, Vector>(_x_, Vector1::Constant(4))(_y_, Vector1::Constant(5));
|
||||
VectorValues actual = smallBayesNet.optimize();
|
||||
const VectorValues expected{{_x_, Vector1::Constant(4)},
|
||||
{_y_, Vector1::Constant(5)}};
|
||||
const VectorValues actual = smallBayesNet.optimize();
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(GaussianBayesNet, NoisyOptimize) {
|
||||
|
@ -127,7 +124,7 @@ TEST(GaussianBayesNet, NoisyOptimize) {
|
|||
Vector d;
|
||||
boost::tie(R, d) = noisyBayesNet.matrix(); // find matrix and RHS
|
||||
const Vector x = R.inverse() * d;
|
||||
VectorValues expected = map_list_of<Key, Vector>(_x_, x.head(1))(_y_, x.tail(1));
|
||||
const VectorValues expected{{_x_, x.head(1)}, {_y_, x.tail(1)}};
|
||||
|
||||
VectorValues actual = noisyBayesNet.optimize();
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
|
@ -136,17 +133,16 @@ TEST(GaussianBayesNet, NoisyOptimize) {
|
|||
/* ************************************************************************* */
|
||||
TEST( GaussianBayesNet, optimizeIncomplete )
|
||||
{
|
||||
static GaussianBayesNet incompleteBayesNet = list_of
|
||||
(GaussianConditional(_x_, Vector1::Constant(9), I_1x1, _y_, I_1x1));
|
||||
static GaussianBayesNet incompleteBayesNet;
|
||||
incompleteBayesNet.emplace_shared<GaussianConditional>(
|
||||
_x_, Vector1::Constant(9), I_1x1, _y_, I_1x1);
|
||||
|
||||
VectorValues solutionForMissing = map_list_of<Key, Vector>
|
||||
(_y_, Vector1::Constant(5));
|
||||
VectorValues solutionForMissing { {_y_, Vector1::Constant(5)} };
|
||||
|
||||
VectorValues actual = incompleteBayesNet.optimize(solutionForMissing);
|
||||
|
||||
VectorValues expected = map_list_of<Key, Vector>
|
||||
(_x_, Vector1::Constant(4))
|
||||
(_y_, Vector1::Constant(5));
|
||||
VectorValues expected{{_x_, Vector1::Constant(4)},
|
||||
{_y_, Vector1::Constant(5)}};
|
||||
|
||||
EXPECT(assert_equal(expected,actual));
|
||||
}
|
||||
|
@ -159,14 +155,11 @@ TEST( GaussianBayesNet, optimize3 )
|
|||
// 5 1 5
|
||||
// NOTE: we are supplying a new RHS here
|
||||
|
||||
VectorValues expected = map_list_of<Key, Vector>
|
||||
(_x_, Vector1::Constant(-1))
|
||||
(_y_, Vector1::Constant(5));
|
||||
VectorValues expected { {_x_, Vector1::Constant(-1)},
|
||||
{_y_, Vector1::Constant(5)} };
|
||||
|
||||
// Test different RHS version
|
||||
VectorValues gx = map_list_of<Key, Vector>
|
||||
(_x_, Vector1::Constant(4))
|
||||
(_y_, Vector1::Constant(5));
|
||||
VectorValues gx{{_x_, Vector1::Constant(4)}, {_y_, Vector1::Constant(5)}};
|
||||
VectorValues actual = smallBayesNet.backSubstitute(gx);
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
|
@ -176,9 +169,9 @@ namespace sampling {
|
|||
static Matrix A1 = (Matrix(2, 2) << 1., 2., 3., 4.).finished();
|
||||
static const Vector2 mean(20, 40), b(10, 10);
|
||||
static const double sigma = 0.01;
|
||||
static const GaussianBayesNet gbn =
|
||||
list_of(GaussianConditional::FromMeanAndStddev(X(0), A1, X(1), b, sigma))(
|
||||
GaussianDensity::FromMeanAndStddev(X(1), mean, sigma));
|
||||
static const GaussianBayesNet gbn = {
|
||||
GaussianConditional::sharedMeanAndStddev(X(0), A1, X(1), b, sigma),
|
||||
GaussianDensity::sharedMeanAndStddev(X(1), mean, sigma)};
|
||||
} // namespace sampling
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
@ -253,13 +246,9 @@ TEST( GaussianBayesNet, backSubstituteTranspose )
|
|||
// x=R'*y, expected=inv(R')*x
|
||||
// 2 = 1 2
|
||||
// 5 1 1 3
|
||||
VectorValues
|
||||
x = map_list_of<Key, Vector>
|
||||
(_x_, Vector1::Constant(2))
|
||||
(_y_, Vector1::Constant(5)),
|
||||
expected = map_list_of<Key, Vector>
|
||||
(_x_, Vector1::Constant(2))
|
||||
(_y_, Vector1::Constant(3));
|
||||
const VectorValues x{{_x_, Vector1::Constant(2)},
|
||||
{_y_, Vector1::Constant(5)}},
|
||||
expected{{_x_, Vector1::Constant(2)}, {_y_, Vector1::Constant(3)}};
|
||||
|
||||
VectorValues actual = smallBayesNet.backSubstituteTranspose(x);
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
|
@ -276,13 +265,8 @@ TEST( GaussianBayesNet, backSubstituteTransposeNoisy )
|
|||
// x=R'*y, expected=inv(R')*x
|
||||
// 2 = 1 2
|
||||
// 5 1 1 3
|
||||
VectorValues
|
||||
x = map_list_of<Key, Vector>
|
||||
(_x_, Vector1::Constant(2))
|
||||
(_y_, Vector1::Constant(5)),
|
||||
expected = map_list_of<Key, Vector>
|
||||
(_x_, Vector1::Constant(4))
|
||||
(_y_, Vector1::Constant(9));
|
||||
VectorValues x{{_x_, Vector1::Constant(2)}, {_y_, Vector1::Constant(5)}},
|
||||
expected{{_x_, Vector1::Constant(4)}, {_y_, Vector1::Constant(9)}};
|
||||
|
||||
VectorValues actual = noisyBayesNet.backSubstituteTranspose(x);
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
|
|
|
@ -23,43 +23,45 @@
|
|||
#include <gtsam/linear/GaussianConditional.h>
|
||||
#include <gtsam/linear/GaussianJunctionTree.h>
|
||||
|
||||
#include <boost/assign/list_of.hpp>
|
||||
#include <boost/assign/std/list.hpp> // for operator +=
|
||||
#include <boost/assign/std/set.hpp> // for operator +=
|
||||
#include <iostream>
|
||||
#include <vector>
|
||||
|
||||
using namespace boost::assign;
|
||||
using namespace std::placeholders;
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
|
||||
using Pairs = std::vector<std::pair<Key, Matrix>>;
|
||||
|
||||
namespace {
|
||||
const Key x1=1, x2=2, x3=3, x4=4;
|
||||
const SharedDiagonal chainNoise = noiseModel::Isotropic::Sigma(1, 0.5);
|
||||
const GaussianFactorGraph chain = list_of
|
||||
(JacobianFactor(x2, (Matrix(1, 1) << 1.).finished(), x1, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise))
|
||||
(JacobianFactor(x2, (Matrix(1, 1) << 1.).finished(), x3, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise))
|
||||
(JacobianFactor(x3, (Matrix(1, 1) << 1.).finished(), x4, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise))
|
||||
(JacobianFactor(x4, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise));
|
||||
const Ordering chainOrdering = Ordering(list_of(x2)(x1)(x3)(x4));
|
||||
const GaussianFactorGraph chain = {
|
||||
boost::make_shared<JacobianFactor>(
|
||||
x2, I_1x1, x1, I_1x1, (Vector(1) << 1.).finished(), chainNoise),
|
||||
boost::make_shared<JacobianFactor>(
|
||||
x2, I_1x1, x3, I_1x1, (Vector(1) << 1.).finished(), chainNoise),
|
||||
boost::make_shared<JacobianFactor>(
|
||||
x3, I_1x1, x4, I_1x1, (Vector(1) << 1.).finished(), chainNoise),
|
||||
boost::make_shared<JacobianFactor>(
|
||||
x4, I_1x1, (Vector(1) << 1.).finished(), chainNoise)};
|
||||
const Ordering chainOrdering {x2, x1, x3, x4};
|
||||
|
||||
/* ************************************************************************* */
|
||||
// Helper functions for below
|
||||
GaussianBayesTreeClique::shared_ptr MakeClique(const GaussianConditional& conditional)
|
||||
GaussianBayesTreeClique::shared_ptr LeafClique(const GaussianConditional& conditional)
|
||||
{
|
||||
return boost::make_shared<GaussianBayesTreeClique>(
|
||||
boost::make_shared<GaussianConditional>(conditional));
|
||||
}
|
||||
|
||||
template<typename CHILDREN>
|
||||
typedef std::vector<GaussianBayesTreeClique::shared_ptr> Children;
|
||||
|
||||
GaussianBayesTreeClique::shared_ptr MakeClique(
|
||||
const GaussianConditional& conditional, const CHILDREN& children)
|
||||
const GaussianConditional& conditional, const Children& children)
|
||||
{
|
||||
GaussianBayesTreeClique::shared_ptr clique =
|
||||
boost::make_shared<GaussianBayesTreeClique>(
|
||||
boost::make_shared<GaussianConditional>(conditional));
|
||||
auto clique = LeafClique(conditional);
|
||||
clique->children.assign(children.begin(), children.end());
|
||||
for(typename CHILDREN::const_iterator child = children.begin(); child != children.end(); ++child)
|
||||
for(Children::const_iterator child = children.begin(); child != children.end(); ++child)
|
||||
(*child)->parent_ = clique;
|
||||
return clique;
|
||||
}
|
||||
|
@ -90,23 +92,25 @@ TEST( GaussianBayesTree, eliminate )
|
|||
EXPECT_LONGS_EQUAL(3, scatter.at(2).key);
|
||||
EXPECT_LONGS_EQUAL(4, scatter.at(3).key);
|
||||
|
||||
Matrix two = (Matrix(1, 1) << 2.).finished();
|
||||
Matrix one = (Matrix(1, 1) << 1.).finished();
|
||||
const Matrix two = (Matrix(1, 1) << 2.).finished();
|
||||
const Matrix one = I_1x1;
|
||||
|
||||
const GaussianConditional gc1(
|
||||
std::map<Key, Matrix>{
|
||||
{x3, (Matrix21() << 2., 0.).finished()},
|
||||
{x4, (Matrix21() << 2., 2.).finished()},
|
||||
},
|
||||
2, Vector2(2., 2.));
|
||||
const GaussianConditional gc2(
|
||||
Pairs{
|
||||
{x2, (Matrix21() << -2. * sqrt(2.), 0.).finished()},
|
||||
{x1, (Matrix21() << -sqrt(2.), -sqrt(2.)).finished()},
|
||||
{x3, (Matrix21() << -sqrt(2.), sqrt(2.)).finished()},
|
||||
},
|
||||
2, (Vector(2) << -2. * sqrt(2.), 0.).finished());
|
||||
|
||||
GaussianBayesTree bayesTree_expected;
|
||||
bayesTree_expected.insertRoot(
|
||||
MakeClique(
|
||||
GaussianConditional(
|
||||
pair_list_of<Key, Matrix>(x3, (Matrix21() << 2., 0.).finished())(
|
||||
x4, (Matrix21() << 2., 2.).finished()), 2, Vector2(2., 2.)),
|
||||
list_of(
|
||||
MakeClique(
|
||||
GaussianConditional(
|
||||
pair_list_of<Key, Matrix>(x2,
|
||||
(Matrix21() << -2. * sqrt(2.), 0.).finished())(x1,
|
||||
(Matrix21() << -sqrt(2.), -sqrt(2.)).finished())(x3,
|
||||
(Matrix21() << -sqrt(2.), sqrt(2.)).finished()), 2,
|
||||
(Vector(2) << -2. * sqrt(2.), 0.).finished())))));
|
||||
bayesTree_expected.insertRoot(MakeClique(gc1, Children{LeafClique(gc2)}));
|
||||
|
||||
EXPECT(assert_equal(bayesTree_expected, bt));
|
||||
}
|
||||
|
@ -114,11 +118,10 @@ TEST( GaussianBayesTree, eliminate )
|
|||
/* ************************************************************************* */
|
||||
TEST( GaussianBayesTree, optimizeMultiFrontal )
|
||||
{
|
||||
VectorValues expected = pair_list_of<Key, Vector>
|
||||
(x1, (Vector(1) << 0.).finished())
|
||||
(x2, (Vector(1) << 1.).finished())
|
||||
(x3, (Vector(1) << 0.).finished())
|
||||
(x4, (Vector(1) << 1.).finished());
|
||||
VectorValues expected = {{x1, (Vector(1) << 0.).finished()},
|
||||
{x2, (Vector(1) << 1.).finished()},
|
||||
{x3, (Vector(1) << 0.).finished()},
|
||||
{x4, (Vector(1) << 1.).finished()}};
|
||||
|
||||
VectorValues actual = chain.eliminateMultifrontal(chainOrdering)->optimize();
|
||||
EXPECT(assert_equal(expected,actual));
|
||||
|
@ -126,40 +129,61 @@ TEST( GaussianBayesTree, optimizeMultiFrontal )
|
|||
|
||||
/* ************************************************************************* */
|
||||
TEST(GaussianBayesTree, complicatedMarginal) {
|
||||
|
||||
// Create the conditionals to go in the BayesTree
|
||||
const GaussianConditional gc1(
|
||||
Pairs{{11, (Matrix(3, 1) << 0.0971, 0, 0).finished()},
|
||||
{12, (Matrix(3, 2) << 0.3171, 0.4387, 0.9502, 0.3816, 0, 0.7655)
|
||||
.finished()}},
|
||||
2, Vector3(0.2638, 0.1455, 0.1361));
|
||||
const GaussianConditional gc2(
|
||||
Pairs{
|
||||
{9, (Matrix(3, 1) << 0.7952, 0, 0).finished()},
|
||||
{10, (Matrix(3, 2) << 0.4456, 0.7547, 0.6463, 0.2760, 0, 0.6797)
|
||||
.finished()},
|
||||
{11, (Matrix(3, 1) << 0.6551, 0.1626, 0.1190).finished()},
|
||||
{12, (Matrix(3, 2) << 0.4984, 0.5853, 0.9597, 0.2238, 0.3404, 0.7513)
|
||||
.finished()}},
|
||||
2, Vector3(0.4314, 0.9106, 0.1818));
|
||||
const GaussianConditional gc3(
|
||||
Pairs{{7, (Matrix(3, 1) << 0.2551, 0, 0).finished()},
|
||||
{8, (Matrix(3, 2) << 0.8909, 0.1386, 0.9593, 0.1493, 0, 0.2575)
|
||||
.finished()},
|
||||
{11, (Matrix(3, 1) << 0.8407, 0.2543, 0.8143).finished()}},
|
||||
2, Vector3(0.3998, 0.2599, 0.8001));
|
||||
const GaussianConditional gc4(
|
||||
Pairs{{5, (Matrix(3, 1) << 0.2435, 0, 0).finished()},
|
||||
{6, (Matrix(3, 2) << 0.4733, 0.1966, 0.3517, 0.2511, 0.8308, 0.0)
|
||||
.finished()},
|
||||
// NOTE the non-upper-triangular form
|
||||
// here since this test was written when we had column permutations
|
||||
// from LDL. The code still works currently (does not enfore
|
||||
// upper-triangularity in this case) but this test will need to be
|
||||
// redone if this stops working in the future
|
||||
{7, (Matrix(3, 1) << 0.5853, 0.5497, 0.9172).finished()},
|
||||
{8, (Matrix(3, 2) << 0.2858, 0.3804, 0.7572, 0.5678, 0.7537, 0.0759)
|
||||
.finished()}},
|
||||
2, Vector3(0.8173, 0.8687, 0.0844));
|
||||
const GaussianConditional gc5(
|
||||
Pairs{{3, (Matrix(3, 1) << 0.0540, 0, 0).finished()},
|
||||
{4, (Matrix(3, 2) << 0.9340, 0.4694, 0.1299, 0.0119, 0, 0.3371)
|
||||
.finished()},
|
||||
{6, (Matrix(3, 2) << 0.1622, 0.5285, 0.7943, 0.1656, 0.3112, 0.6020)
|
||||
.finished()}},
|
||||
2, Vector3(0.9619, 0.0046, 0.7749));
|
||||
const GaussianConditional gc6(
|
||||
Pairs{{1, (Matrix(3, 1) << 0.2630, 0, 0).finished()},
|
||||
{2, (Matrix(3, 2) << 0.7482, 0.2290, 0.4505, 0.9133, 0, 0.1524)
|
||||
.finished()},
|
||||
{5, (Matrix(3, 1) << 0.8258, 0.5383, 0.9961).finished()}},
|
||||
2, Vector3(0.0782, 0.4427, 0.1067));
|
||||
|
||||
// Create the bayes tree:
|
||||
GaussianBayesTree bt;
|
||||
bt.insertRoot(
|
||||
MakeClique(GaussianConditional(pair_list_of<Key, Matrix> (11, (Matrix(3,1) << 0.0971, 0, 0).finished())
|
||||
(12, (Matrix(3,2) << 0.3171, 0.4387, 0.9502, 0.3816, 0, 0.7655).finished()),
|
||||
2, Vector3(0.2638, 0.1455, 0.1361)), list_of
|
||||
(MakeClique(GaussianConditional(pair_list_of<Key, Matrix> (9, (Matrix(3,1) << 0.7952, 0, 0).finished())
|
||||
(10, (Matrix(3,2) << 0.4456, 0.7547, 0.6463, 0.2760, 0, 0.6797).finished())
|
||||
(11, (Matrix(3,1) << 0.6551, 0.1626, 0.1190).finished())
|
||||
(12, (Matrix(3,2) << 0.4984, 0.5853, 0.9597, 0.2238, 0.3404, 0.7513).finished()),
|
||||
2, Vector3(0.4314, 0.9106, 0.1818))))
|
||||
(MakeClique(GaussianConditional(pair_list_of<Key, Matrix> (7, (Matrix(3,1) << 0.2551, 0, 0).finished())
|
||||
(8, (Matrix(3,2) << 0.8909, 0.1386, 0.9593, 0.1493, 0, 0.2575).finished())
|
||||
(11, (Matrix(3,1) << 0.8407, 0.2543, 0.8143).finished()),
|
||||
2, Vector3(0.3998, 0.2599, 0.8001)), list_of
|
||||
(MakeClique(GaussianConditional(pair_list_of<Key, Matrix> (5, (Matrix(3,1) << 0.2435, 0, 0).finished())
|
||||
(6, (Matrix(3,2) << 0.4733, 0.1966, 0.3517, 0.2511, 0.8308, 0.0).finished())
|
||||
// NOTE the non-upper-triangular form
|
||||
// here since this test was written when we had column permutations
|
||||
// from LDL. The code still works currently (does not enforce
|
||||
// upper-triangularity in this case) but this test will need to be
|
||||
// redone if this stops working in the future
|
||||
(7, (Matrix(3,1) << 0.5853, 0.5497, 0.9172).finished())
|
||||
(8, (Matrix(3,2) << 0.2858, 0.3804, 0.7572, 0.5678, 0.7537, 0.0759).finished()),
|
||||
2, Vector3(0.8173, 0.8687, 0.0844)), list_of
|
||||
(MakeClique(GaussianConditional(pair_list_of<Key, Matrix> (3, (Matrix(3,1) << 0.0540, 0, 0).finished())
|
||||
(4, (Matrix(3,2) << 0.9340, 0.4694, 0.1299, 0.0119, 0, 0.3371).finished())
|
||||
(6, (Matrix(3,2) << 0.1622, 0.5285, 0.7943, 0.1656, 0.3112, 0.6020).finished()),
|
||||
2, Vector3(0.9619, 0.0046, 0.7749))))
|
||||
(MakeClique(GaussianConditional(pair_list_of<Key, Matrix> (1, (Matrix(3,1) << 0.2630, 0, 0).finished())
|
||||
(2, (Matrix(3,2) << 0.7482, 0.2290, 0.4505, 0.9133, 0, 0.1524).finished())
|
||||
(5, (Matrix(3,1) << 0.8258, 0.5383, 0.9961).finished()),
|
||||
2, Vector3(0.0782, 0.4427, 0.1067))))))))));
|
||||
bt.insertRoot(MakeClique(
|
||||
gc1, Children{LeafClique(gc2),
|
||||
MakeClique(gc3, Children{MakeClique(
|
||||
gc4, Children{LeafClique(gc5),
|
||||
LeafClique(gc6)})})}));
|
||||
|
||||
// Marginal on 5
|
||||
Matrix expectedCov = (Matrix(1,1) << 236.5166).finished();
|
||||
|
@ -201,61 +225,33 @@ double computeError(const GaussianBayesTree& gbt, const Vector10& values) {
|
|||
|
||||
/* ************************************************************************* */
|
||||
TEST(GaussianBayesTree, ComputeSteepestDescentPointBT) {
|
||||
const GaussianConditional gc1(
|
||||
Pairs{{2, (Matrix(6, 2) << 31.0, 32.0, 0.0, 34.0, 0.0, 0.0, 0.0, 0.0, 0.0,
|
||||
0.0, 0.0, 0.0)
|
||||
.finished()},
|
||||
{3, (Matrix(6, 2) << 35.0, 36.0, 37.0, 38.0, 41.0, 42.0, 0.0, 44.0,
|
||||
0.0, 0.0, 0.0, 0.0)
|
||||
.finished()},
|
||||
{4, (Matrix(6, 2) << 0.0, 0.0, 0.0, 0.0, 45.0, 46.0, 47.0, 48.0,
|
||||
51.0, 52.0, 0.0, 54.0)
|
||||
.finished()}},
|
||||
3, (Vector(6) << 29.0, 30.0, 39.0, 40.0, 49.0, 50.0).finished()),
|
||||
gc2(Pairs{{0, (Matrix(4, 2) << 3.0, 4.0, 0.0, 6.0, 0.0, 0.0, 0.0, 0.0)
|
||||
.finished()},
|
||||
{1, (Matrix(4, 2) << 0.0, 0.0, 0.0, 0.0, 17.0, 18.0, 0.0, 20.0)
|
||||
.finished()},
|
||||
{2, (Matrix(4, 2) << 0.0, 0.0, 0.0, 0.0, 21.0, 22.0, 23.0, 24.0)
|
||||
.finished()},
|
||||
{3, (Matrix(4, 2) << 7.0, 8.0, 9.0, 10.0, 0.0, 0.0, 0.0, 0.0)
|
||||
.finished()},
|
||||
{4, (Matrix(4, 2) << 11.0, 12.0, 13.0, 14.0, 25.0, 26.0, 27.0,
|
||||
28.0)
|
||||
.finished()}},
|
||||
2, (Vector(4) << 1.0, 2.0, 15.0, 16.0).finished());
|
||||
|
||||
// Create an arbitrary Bayes Tree
|
||||
GaussianBayesTree bt;
|
||||
bt.insertRoot(MakeClique(GaussianConditional(
|
||||
pair_list_of<Key, Matrix>
|
||||
(2, (Matrix(6, 2) <<
|
||||
31.0,32.0,
|
||||
0.0,34.0,
|
||||
0.0,0.0,
|
||||
0.0,0.0,
|
||||
0.0,0.0,
|
||||
0.0,0.0).finished())
|
||||
(3, (Matrix(6, 2) <<
|
||||
35.0,36.0,
|
||||
37.0,38.0,
|
||||
41.0,42.0,
|
||||
0.0,44.0,
|
||||
0.0,0.0,
|
||||
0.0,0.0).finished())
|
||||
(4, (Matrix(6, 2) <<
|
||||
0.0,0.0,
|
||||
0.0,0.0,
|
||||
45.0,46.0,
|
||||
47.0,48.0,
|
||||
51.0,52.0,
|
||||
0.0,54.0).finished()),
|
||||
3, (Vector(6) << 29.0,30.0,39.0,40.0,49.0,50.0).finished()), list_of
|
||||
(MakeClique(GaussianConditional(
|
||||
pair_list_of<Key, Matrix>
|
||||
(0, (Matrix(4, 2) <<
|
||||
3.0,4.0,
|
||||
0.0,6.0,
|
||||
0.0,0.0,
|
||||
0.0,0.0).finished())
|
||||
(1, (Matrix(4, 2) <<
|
||||
0.0,0.0,
|
||||
0.0,0.0,
|
||||
17.0,18.0,
|
||||
0.0,20.0).finished())
|
||||
(2, (Matrix(4, 2) <<
|
||||
0.0,0.0,
|
||||
0.0,0.0,
|
||||
21.0,22.0,
|
||||
23.0,24.0).finished())
|
||||
(3, (Matrix(4, 2) <<
|
||||
7.0,8.0,
|
||||
9.0,10.0,
|
||||
0.0,0.0,
|
||||
0.0,0.0).finished())
|
||||
(4, (Matrix(4, 2) <<
|
||||
11.0,12.0,
|
||||
13.0,14.0,
|
||||
25.0,26.0,
|
||||
27.0,28.0).finished()),
|
||||
2, (Vector(4) << 1.0,2.0,15.0,16.0).finished())))));
|
||||
bt.insertRoot(MakeClique(gc1, Children{LeafClique(gc2)}));
|
||||
|
||||
// Compute the Hessian numerically
|
||||
Matrix hessian = numericalHessian<Vector10>(
|
||||
|
@ -276,12 +272,11 @@ TEST(GaussianBayesTree, ComputeSteepestDescentPointBT) {
|
|||
Vector expected = gradient * step;
|
||||
|
||||
// Known steepest descent point from Bayes' net version
|
||||
VectorValues expectedFromBN = pair_list_of<Key, Vector>
|
||||
(0, Vector2(0.000129034, 0.000688183))
|
||||
(1, Vector2(0.0109679, 0.0253767))
|
||||
(2, Vector2(0.0680441, 0.114496))
|
||||
(3, Vector2(0.16125, 0.241294))
|
||||
(4, Vector2(0.300134, 0.423233));
|
||||
VectorValues expectedFromBN{{0, Vector2(0.000129034, 0.000688183)},
|
||||
{1, Vector2(0.0109679, 0.0253767)},
|
||||
{2, Vector2(0.0680441, 0.114496)},
|
||||
{3, Vector2(0.16125, 0.241294)},
|
||||
{4, Vector2(0.300134, 0.423233)}};
|
||||
|
||||
// Compute the steepest descent point with the dogleg function
|
||||
VectorValues actual = bt.optimizeGradientSearch();
|
||||
|
|
|
@ -26,11 +26,7 @@
|
|||
#include <gtsam/linear/GaussianDensity.h>
|
||||
#include <gtsam/linear/GaussianBayesNet.h>
|
||||
|
||||
#include <boost/assign/std/list.hpp>
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
#include <boost/assign/list_inserter.hpp>
|
||||
#include <boost/make_shared.hpp>
|
||||
#include <boost/assign/list_of.hpp>
|
||||
|
||||
#include <iostream>
|
||||
#include <sstream>
|
||||
|
@ -38,7 +34,6 @@
|
|||
|
||||
using namespace gtsam;
|
||||
using namespace std;
|
||||
using namespace boost::assign;
|
||||
using symbol_shorthand::X;
|
||||
using symbol_shorthand::Y;
|
||||
|
||||
|
@ -48,6 +43,8 @@ static Matrix R = (Matrix(2, 2) <<
|
|||
-12.1244, -5.1962,
|
||||
0., 4.6904).finished();
|
||||
|
||||
using Dims = std::vector<Eigen::Index>;
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(GaussianConditional, constructor)
|
||||
{
|
||||
|
@ -64,11 +61,7 @@ TEST(GaussianConditional, constructor)
|
|||
Vector d = Vector2(1.0, 2.0);
|
||||
SharedDiagonal s = noiseModel::Diagonal::Sigmas(Vector2(3.0, 4.0));
|
||||
|
||||
vector<pair<Key, Matrix> > terms = pair_list_of
|
||||
(1, R)
|
||||
(3, S1)
|
||||
(5, S2)
|
||||
(7, S3);
|
||||
vector<pair<Key, Matrix> > terms = {{1, R}, {3, S1}, {5, S2}, {7, S3}};
|
||||
|
||||
GaussianConditional actual(terms, 1, d, s);
|
||||
|
||||
|
@ -223,14 +216,10 @@ TEST( GaussianConditional, solve )
|
|||
Vector sx1(2); sx1 << 1.0, 1.0;
|
||||
Vector sl1(2); sl1 << 1.0, 1.0;
|
||||
|
||||
VectorValues expected = map_list_of
|
||||
(1, expectedX)
|
||||
(2, sx1)
|
||||
(10, sl1);
|
||||
VectorValues expected = {{1, expectedX}, {2, sx1}, {10, sl1}};
|
||||
|
||||
VectorValues solution = map_list_of
|
||||
(2, sx1) // parents
|
||||
(10, sl1);
|
||||
VectorValues solution = {{2, sx1}, // parents
|
||||
{10, sl1}};
|
||||
solution.insert(cg.solve(solution));
|
||||
|
||||
EXPECT(assert_equal(expected, solution, tol));
|
||||
|
@ -240,7 +229,7 @@ TEST( GaussianConditional, solve )
|
|||
TEST( GaussianConditional, solve_simple )
|
||||
{
|
||||
// 2 variables, frontal has dim=4
|
||||
VerticalBlockMatrix blockMatrix(list_of(4)(2)(1), 4);
|
||||
VerticalBlockMatrix blockMatrix(Dims{4, 2, 1}, 4);
|
||||
blockMatrix.matrix() <<
|
||||
1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.1,
|
||||
0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.2,
|
||||
|
@ -248,18 +237,16 @@ TEST( GaussianConditional, solve_simple )
|
|||
0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.4;
|
||||
|
||||
// solve system as a non-multifrontal version first
|
||||
GaussianConditional cg(list_of(1)(2), 1, blockMatrix);
|
||||
GaussianConditional cg(KeyVector{1,2}, 1, blockMatrix);
|
||||
|
||||
// partial solution
|
||||
Vector sx1 = Vector2(9.0, 10.0);
|
||||
|
||||
// elimination order: 1, 2
|
||||
VectorValues actual = map_list_of
|
||||
(2, sx1); // parent
|
||||
VectorValues actual = {{2, sx1}}; // parent
|
||||
|
||||
VectorValues expected = map_list_of<Key, Vector>
|
||||
(2, sx1)
|
||||
(1, (Vector(4) << -3.1,-3.4,-11.9,-13.2).finished());
|
||||
VectorValues expected = {
|
||||
{2, sx1}, {1, (Vector(4) << -3.1, -3.4, -11.9, -13.2).finished()}};
|
||||
|
||||
// verify indices/size
|
||||
EXPECT_LONGS_EQUAL(2, (long)cg.size());
|
||||
|
@ -274,7 +261,7 @@ TEST( GaussianConditional, solve_simple )
|
|||
TEST( GaussianConditional, solve_multifrontal )
|
||||
{
|
||||
// create full system, 3 variables, 2 frontals, all 2 dim
|
||||
VerticalBlockMatrix blockMatrix(list_of(2)(2)(2)(1), 4);
|
||||
VerticalBlockMatrix blockMatrix(Dims{2, 2, 2, 1}, 4);
|
||||
blockMatrix.matrix() <<
|
||||
1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.1,
|
||||
0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.2,
|
||||
|
@ -282,7 +269,7 @@ TEST( GaussianConditional, solve_multifrontal )
|
|||
0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.4;
|
||||
|
||||
// 3 variables, all dim=2
|
||||
GaussianConditional cg(list_of(1)(2)(10), 2, blockMatrix);
|
||||
GaussianConditional cg(KeyVector{1, 2, 10}, 2, blockMatrix);
|
||||
|
||||
EXPECT(assert_equal(Vector(blockMatrix.full().rightCols(1)), cg.d()));
|
||||
|
||||
|
@ -290,13 +277,10 @@ TEST( GaussianConditional, solve_multifrontal )
|
|||
Vector sl1 = Vector2(9.0, 10.0);
|
||||
|
||||
// elimination order; _x_, _x1_, _l1_
|
||||
VectorValues actual = map_list_of
|
||||
(10, sl1); // parent
|
||||
VectorValues actual = {{10, sl1}}; // parent
|
||||
|
||||
VectorValues expected = map_list_of<Key, Vector>
|
||||
(1, Vector2(-3.1,-3.4))
|
||||
(2, Vector2(-11.9,-13.2))
|
||||
(10, sl1);
|
||||
VectorValues expected = {
|
||||
{1, Vector2(-3.1, -3.4)}, {2, Vector2(-11.9, -13.2)}, {10, sl1}};
|
||||
|
||||
// verify indices/size
|
||||
EXPECT_LONGS_EQUAL(3, (long)cg.size());
|
||||
|
@ -322,21 +306,18 @@ TEST( GaussianConditional, solveTranspose ) {
|
|||
d2(0) = 5;
|
||||
|
||||
// define nodes and specify in reverse topological sort (i.e. parents last)
|
||||
GaussianBayesNet cbn = list_of
|
||||
(GaussianConditional(1, d1, R11, 2, S12))
|
||||
(GaussianConditional(1, d2, R22));
|
||||
GaussianBayesNet cbn;
|
||||
cbn.emplace_shared<GaussianConditional>(1, d1, R11, 2, S12);
|
||||
cbn.emplace_shared<GaussianConditional>(1, d2, R22);
|
||||
|
||||
// x=R'*y, y=inv(R')*x
|
||||
// 2 = 1 2
|
||||
// 5 1 1 3
|
||||
|
||||
VectorValues
|
||||
x = map_list_of<Key, Vector>
|
||||
(1, (Vector(1) << 2.).finished())
|
||||
(2, (Vector(1) << 5.).finished()),
|
||||
y = map_list_of<Key, Vector>
|
||||
(1, (Vector(1) << 2.).finished())
|
||||
(2, (Vector(1) << 3.).finished());
|
||||
VectorValues x = {{1, (Vector(1) << 2.).finished()},
|
||||
{2, (Vector(1) << 5.).finished()}},
|
||||
y = {{1, (Vector(1) << 2.).finished()},
|
||||
{2, (Vector(1) << 3.).finished()}};
|
||||
|
||||
// test functional version
|
||||
VectorValues actual = cbn.backSubstituteTranspose(x);
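A hedged sketch of the emplace_shared construction used above to assemble the GaussianBayesNet in place of the removed boost::assign list_of chain; the 1x1 matrices and vectors below are illustrative only:

#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianConditional.h>

using namespace gtsam;

GaussianBayesNet emplaceSharedBayesNetSketch() {
  const Matrix R11 = (Matrix(1, 1) << 1.0).finished();
  const Matrix S12 = (Matrix(1, 1) << 1.0).finished();
  const Matrix R22 = (Matrix(1, 1) << 1.0).finished();
  const Vector d1 = (Vector(1) << 9.0).finished();
  const Vector d2 = (Vector(1) << 5.0).finished();

  GaussianBayesNet cbn;
  // emplace_shared constructs each conditional directly inside the net.
  cbn.emplace_shared<GaussianConditional>(1, d1, R11, 2, S12);  // p(x1 | x2)
  cbn.emplace_shared<GaussianConditional>(2, d2, R22);          // p(x2)
  return cbn;
}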
@ -395,7 +376,7 @@ TEST(GaussianConditional, FromMeanAndStddev) {
|
|||
const Vector2 b(20, 40), x0(1, 2), x1(3, 4), x2(5, 6);
|
||||
const double sigma = 3;
|
||||
|
||||
VectorValues values = map_list_of(X(0), x0)(X(1), x1)(X(2), x2);
|
||||
VectorValues values{{X(0), x0}, {X(1), x1}, {X(2), x2}};
|
||||
|
||||
auto conditional1 =
|
||||
GaussianConditional::FromMeanAndStddev(X(0), A1, X(1), b, sigma);
|
||||
|
|
|
@ -26,10 +26,6 @@
|
|||
#include <gtsam/base/debug.h>
|
||||
#include <gtsam/base/VerticalBlockMatrix.h>
|
||||
|
||||
#include <boost/assign/list_of.hpp>
|
||||
#include <boost/assign/std/list.hpp> // for operator +=
|
||||
using namespace boost::assign;
|
||||
|
||||
#include <gtsam/base/TestableAssertions.h>
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
|
@ -212,8 +208,9 @@ TEST(GaussianFactorGraph, gradient) {
|
|||
// 2*f(x) = 100*(x1+c[X(1)])^2 + 100*(x2-x1-[0.2;-0.1])^2 + 25*(l1-x1-[0.0;0.2])^2 +
|
||||
// 25*(l1-x2-[-0.2;0.3])^2
|
||||
// worked out: df/dx1 = 100*[0.1;0.1] + 100*[0.2;-0.1] + 25*[0.0;0.2] = [10+20;10-10+5] = [30;5]
|
||||
VectorValues expected = map_list_of<Key, Vector>(1, Vector2(5.0, -12.5))(2, Vector2(30.0, 5.0))(
|
||||
0, Vector2(-25.0, 17.5));
|
||||
VectorValues expected{{1, Vector2(5.0, -12.5)},
|
||||
{2, Vector2(30.0, 5.0)},
|
||||
{0, Vector2(-25.0, 17.5)}};
|
||||
|
||||
// Check the gradient at delta=0
|
||||
VectorValues zero = VectorValues::Zero(expected);
|
||||
|
@ -231,8 +228,8 @@ TEST(GaussianFactorGraph, gradient) {
|
|||
TEST(GaussianFactorGraph, transposeMultiplication) {
|
||||
GaussianFactorGraph A = createSimpleGaussianFactorGraph();
|
||||
|
||||
Errors e;
|
||||
e += Vector2(0.0, 0.0), Vector2(15.0, 0.0), Vector2(0.0, -5.0), Vector2(-7.5, -5.0);
|
||||
Errors e = {Vector2(0.0, 0.0), Vector2(15.0, 0.0), Vector2(0.0, -5.0),
|
||||
Vector2(-7.5, -5.0)};
|
||||
|
||||
VectorValues expected;
|
||||
expected.insert(1, Vector2(-37.5, -50.0));
|
||||
|
@ -288,7 +285,7 @@ TEST(GaussianFactorGraph, matrices2) {
|
|||
TEST(GaussianFactorGraph, multiplyHessianAdd) {
|
||||
GaussianFactorGraph gfg = createSimpleGaussianFactorGraph();
|
||||
|
||||
VectorValues x = map_list_of<Key, Vector>(0, Vector2(1, 2))(1, Vector2(3, 4))(2, Vector2(5, 6));
|
||||
const VectorValues x{{0, Vector2(1, 2)}, {1, Vector2(3, 4)}, {2, Vector2(5, 6)}};
|
||||
|
||||
VectorValues expected;
|
||||
expected.insert(0, Vector2(-450, -450));
|
||||
|
@ -307,8 +304,8 @@ TEST(GaussianFactorGraph, multiplyHessianAdd) {
|
|||
/* ************************************************************************* */
|
||||
static GaussianFactorGraph createGaussianFactorGraphWithHessianFactor() {
|
||||
GaussianFactorGraph gfg = createSimpleGaussianFactorGraph();
|
||||
gfg += HessianFactor(1, 2, 100*I_2x2, Z_2x2, Vector2(0.0, 1.0),
|
||||
400*I_2x2, Vector2(1.0, 1.0), 3.0);
|
||||
gfg.emplace_shared<HessianFactor>(1, 2, 100 * I_2x2, Z_2x2, Vector2(0.0, 1.0),
|
||||
400 * I_2x2, Vector2(1.0, 1.0), 3.0);
|
||||
return gfg;
|
||||
}
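The helper above now appends the extra HessianFactor with emplace_shared instead of boost::assign's operator+=. The same idiom works for any factor type; a small sketch on a fresh graph, using an illustrative unary Jacobian term rather than the test's Hessian:

#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>

using namespace gtsam;

GaussianFactorGraph emplaceSharedGraphSketch() {
  GaussianFactorGraph gfg;
  // Constructs the factor in place and stores a shared_ptr to it.
  gfg.emplace_shared<JacobianFactor>(0, I_2x2, Vector2(1.0, 2.0));
  return gfg;
}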
@ -326,8 +323,7 @@ TEST(GaussianFactorGraph, multiplyHessianAdd2) {
|
|||
Y << -450, -450, 300, 400, 2950, 3450;
|
||||
EXPECT(assert_equal(Y, AtA * X));
|
||||
|
||||
VectorValues x = map_list_of<Key, Vector>(0, Vector2(1, 2))(1, Vector2(3, 4))(2, Vector2(5, 6));
|
||||
|
||||
const VectorValues x {{0, Vector2(1, 2)}, {1, Vector2(3, 4)}, {2, Vector2(5, 6)}};
|
||||
VectorValues expected;
|
||||
expected.insert(0, Vector2(-450, -450));
|
||||
expected.insert(1, Vector2(300, 400));
|
||||
|
@ -371,10 +367,10 @@ TEST(GaussianFactorGraph, gradientAtZero) {
|
|||
/* ************************************************************************* */
|
||||
TEST(GaussianFactorGraph, clone) {
|
||||
// 2 variables, frontal has dim=4
|
||||
VerticalBlockMatrix blockMatrix(list_of(4)(2)(1), 4);
|
||||
VerticalBlockMatrix blockMatrix(KeyVector{4, 2, 1}, 4);
|
||||
blockMatrix.matrix() << 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.1, 0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.2, 0.0,
|
||||
0.0, 3.0, 0.0, 4.0, 0.0, 0.3, 0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.4;
|
||||
GaussianConditional cg(list_of(1)(2), 1, blockMatrix);
|
||||
GaussianConditional cg(KeyVector{1, 2}, 1, blockMatrix);
|
||||
|
||||
GaussianFactorGraph init_graph = createGaussianFactorGraphWithHessianFactor();
|
||||
init_graph.push_back(GaussianFactor::shared_ptr()); /// Add null factor
|
||||
|
|
|
@ -25,11 +25,6 @@
|
|||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
#include <boost/assign/list_of.hpp>
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
#include <boost/assign/std/map.hpp>
|
||||
using namespace boost::assign;
|
||||
|
||||
#include <vector>
|
||||
#include <utility>
|
||||
|
||||
|
@ -38,6 +33,8 @@ using namespace gtsam;
|
|||
|
||||
const double tol = 1e-5;
|
||||
|
||||
using Dims = std::vector<Eigen::Index>; // For constructing block matrices
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(HessianFactor, Slot)
|
||||
{
|
||||
|
@ -61,8 +58,8 @@ TEST(HessianFactor, emptyConstructor)
|
|||
/* ************************************************************************* */
|
||||
TEST(HessianFactor, ConversionConstructor)
|
||||
{
|
||||
HessianFactor expected(list_of(0)(1),
|
||||
SymmetricBlockMatrix(list_of(2)(4)(1), (Matrix(7,7) <<
|
||||
HessianFactor expected(KeyVector{0, 1},
|
||||
SymmetricBlockMatrix(Dims{2, 4, 1}, (Matrix(7,7) <<
|
||||
125.0000, 0.0, -25.0000, 0.0, -100.0000, 0.0, 25.0000,
|
||||
0.0, 125.0000, 0.0, -25.0000, 0.0, -100.0000, -17.5000,
|
||||
-25.0000, 0.0, 25.0000, 0.0, 0.0, 0.0, -5.0000,
|
||||
|
@ -85,9 +82,7 @@ TEST(HessianFactor, ConversionConstructor)
|
|||
|
||||
HessianFactor actual(jacobian);
|
||||
|
||||
VectorValues values = pair_list_of<Key, Vector>
|
||||
(0, Vector2(1.0, 2.0))
|
||||
(1, (Vector(4) << 3.0, 4.0, 5.0, 6.0).finished());
|
||||
VectorValues values{{0, Vector2(1.0, 2.0)}, {1, Vector4(3.0, 4.0, 5.0, 6.0)}};
|
||||
|
||||
EXPECT_LONGS_EQUAL(2, (long)actual.size());
|
||||
EXPECT(assert_equal(expected, actual, 1e-9));
|
||||
|
@ -108,7 +103,7 @@ TEST(HessianFactor, Constructor1)
|
|||
EXPECT(assert_equal(g, Vector(factor.linearTerm())));
|
||||
EXPECT_LONGS_EQUAL(1, (long)factor.size());
|
||||
|
||||
VectorValues dx = pair_list_of<Key, Vector>(0, Vector2(1.5, 2.5));
|
||||
VectorValues dx{{0, Vector2(1.5, 2.5)}};
|
||||
|
||||
// error 0.5*(f - 2*x'*g + x'*G*x)
|
||||
double expected = 80.375;
|
||||
|
@ -150,9 +145,7 @@ TEST(HessianFactor, Constructor2)
|
|||
Vector dx0 = (Vector(1) << 0.5).finished();
|
||||
Vector dx1 = Vector2(1.5, 2.5);
|
||||
|
||||
VectorValues dx = pair_list_of
|
||||
(0, dx0)
|
||||
(1, dx1);
|
||||
VectorValues dx{{0, dx0}, {1, dx1}};
|
||||
|
||||
HessianFactor factor(0, 1, G11, G12, g1, G22, g2, f);
|
||||
|
||||
|
@ -171,10 +164,7 @@ TEST(HessianFactor, Constructor2)
|
|||
EXPECT(assert_equal(G22, factor.info().diagonalBlock(1)));
|
||||
|
||||
// Check case when vector values is larger than factor
|
||||
VectorValues dxLarge = pair_list_of<Key, Vector>
|
||||
(0, dx0)
|
||||
(1, dx1)
|
||||
(2, Vector2(0.1, 0.2));
|
||||
VectorValues dxLarge {{0, dx0}, {1, dx1}, {2, Vector2(0.1, 0.2)}};
|
||||
EXPECT_DOUBLES_EQUAL(expected, factor.error(dxLarge), 1e-10);
|
||||
}
|
||||
|
||||
|
@ -200,11 +190,7 @@ TEST(HessianFactor, Constructor3)
|
|||
Vector dx1 = Vector2(1.5, 2.5);
|
||||
Vector dx2 = Vector3(1.5, 2.5, 3.5);
|
||||
|
||||
VectorValues dx = pair_list_of
|
||||
(0, dx0)
|
||||
(1, dx1)
|
||||
(2, dx2);
|
||||
|
||||
VectorValues dx {{0, dx0}, {1, dx1}, {2, dx2}};
|
||||
HessianFactor factor(0, 1, 2, G11, G12, G13, g1, G22, G23, g2, G33, g3, f);
|
||||
|
||||
double expected = 371.3750;
|
||||
|
@ -247,10 +233,7 @@ TEST(HessianFactor, ConstructorNWay)
|
|||
Vector dx1 = Vector2(1.5, 2.5);
|
||||
Vector dx2 = Vector3(1.5, 2.5, 3.5);
|
||||
|
||||
VectorValues dx = pair_list_of
|
||||
(0, dx0)
|
||||
(1, dx1)
|
||||
(2, dx2);
|
||||
VectorValues dx {{0, dx0}, {1, dx1}, {2, dx2}};
|
||||
|
||||
KeyVector js {0, 1, 2};
|
||||
std::vector<Matrix> Gs;
|
||||
|
@ -309,7 +292,7 @@ TEST(HessianFactor, CombineAndEliminate1) {
|
|||
hessian.augmentedInformation(), 1e-9));
|
||||
|
||||
// perform elimination on jacobian
|
||||
Ordering ordering = list_of(1);
|
||||
Ordering ordering {1};
|
||||
GaussianConditional::shared_ptr expectedConditional;
|
||||
JacobianFactor::shared_ptr expectedFactor;
|
||||
boost::tie(expectedConditional, expectedFactor) = jacobian.eliminate(ordering);
|
||||
|
@ -372,7 +355,7 @@ TEST(HessianFactor, CombineAndEliminate2) {
|
|||
hessian.augmentedInformation(), 1e-9));
|
||||
|
||||
// perform elimination on jacobian
|
||||
Ordering ordering = list_of(0);
|
||||
Ordering ordering {0};
|
||||
GaussianConditional::shared_ptr expectedConditional;
|
||||
JacobianFactor::shared_ptr expectedFactor;
|
||||
boost::tie(expectedConditional, expectedFactor) = //
|
||||
|
@ -437,10 +420,10 @@ TEST(HessianFactor, eliminate2 )
|
|||
|
||||
// eliminate the combined factor
|
||||
HessianFactor::shared_ptr combinedLF_Chol(new HessianFactor(combined));
|
||||
GaussianFactorGraph combinedLFG_Chol = list_of(combinedLF_Chol);
|
||||
GaussianFactorGraph combinedLFG_Chol {combinedLF_Chol};
|
||||
|
||||
std::pair<GaussianConditional::shared_ptr, HessianFactor::shared_ptr> actual_Chol =
|
||||
EliminateCholesky(combinedLFG_Chol, Ordering(list_of(0)));
|
||||
EliminateCholesky(combinedLFG_Chol, Ordering{0});
|
||||
|
||||
// create expected Conditional Gaussian
|
||||
double oldSigma = 0.0894427; // from when R was made unit
|
||||
|
@ -483,8 +466,8 @@ TEST(HessianFactor, combine) {
|
|||
0.0, -8.94427191).finished();
|
||||
Vector b = Vector2(2.23606798,-1.56524758);
|
||||
SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector::Ones(2));
|
||||
GaussianFactor::shared_ptr f(new JacobianFactor(0, A0, 1, A1, 2, A2, b, model));
|
||||
GaussianFactorGraph factors = list_of(f);
|
||||
GaussianFactorGraph factors{
|
||||
boost::make_shared<JacobianFactor>(0, A0, 1, A1, 2, A2, b, model)};
|
||||
|
||||
// Form Ab' * Ab
|
||||
HessianFactor actual(factors);
|
||||
|
@ -514,7 +497,7 @@ TEST(HessianFactor, gradientAtZero)
|
|||
HessianFactor factor(0, 1, G11, G12, g1, G22, g2, f);
|
||||
|
||||
// test gradient at zero
|
||||
VectorValues expectedG = pair_list_of<Key, Vector>(0, -g1) (1, -g2);
|
||||
VectorValues expectedG{{0, -g1}, {1, -g2}};
|
||||
Matrix A; Vector b; boost::tie(A,b) = factor.jacobian();
|
||||
KeyVector keys {0, 1};
|
||||
EXPECT(assert_equal(-A.transpose()*b, expectedG.vector(keys)));
|
||||
|
@ -537,7 +520,7 @@ TEST(HessianFactor, gradient)
|
|||
// test gradient
|
||||
Vector x0 = (Vector(1) << 3.0).finished();
|
||||
Vector x1 = (Vector(2) << -3.5, 7.1).finished();
|
||||
VectorValues x = pair_list_of<Key, Vector>(0, x0) (1, x1);
|
||||
VectorValues x {{0, x0}, {1, x1}};
|
||||
|
||||
Vector expectedGrad0 = (Vector(1) << 10.0).finished();
|
||||
Vector expectedGrad1 = (Vector(2) << 4.5, 16.1).finished();
|
||||
|
|
|
@ -25,26 +25,24 @@
|
|||
#include <gtsam/linear/GaussianConditional.h>
|
||||
#include <gtsam/linear/VectorValues.h>
|
||||
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
#include <boost/assign/list_of.hpp>
|
||||
#include <boost/range/iterator_range.hpp>
|
||||
#include <boost/range/adaptor/map.hpp>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
using namespace boost::assign;
|
||||
|
||||
using Dims = std::vector<Eigen::Index>; // For constructing block matrices
|
||||
|
||||
namespace {
|
||||
namespace simple {
|
||||
// Terms we'll use
|
||||
const vector<pair<Key, Matrix> > terms = list_of<pair<Key,Matrix> >
|
||||
(make_pair(5, Matrix3::Identity()))
|
||||
(make_pair(10, 2*Matrix3::Identity()))
|
||||
(make_pair(15, 3*Matrix3::Identity()));
|
||||
const vector<pair<Key, Matrix> > terms{
|
||||
{5, I_3x3}, {10, 2 * I_3x3}, {15, 3 * I_3x3}};
|
||||
|
||||
// RHS and sigmas
|
||||
const Vector b = Vector3(1., 2., 3.);
|
||||
const SharedDiagonal noise = noiseModel::Diagonal::Sigmas(Vector3(0.5, 0.5, 0.5));
|
||||
// RHS and sigmas
|
||||
const Vector b = Vector3(1., 2., 3.);
|
||||
const SharedDiagonal noise =
|
||||
noiseModel::Diagonal::Sigmas(Vector3(0.5, 0.5, 0.5));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -128,7 +126,7 @@ TEST(JacobianFactor, constructors_and_accessors)
|
|||
// VerticalBlockMatrix constructor
|
||||
JacobianFactor expected(
|
||||
boost::make_iterator_range(terms.begin(), terms.begin() + 3), b, noise);
|
||||
VerticalBlockMatrix blockMatrix(list_of(3)(3)(3)(1), 3);
|
||||
VerticalBlockMatrix blockMatrix(Dims{3, 3, 3, 1}, 3);
|
||||
blockMatrix(0) = terms[0].second;
|
||||
blockMatrix(1) = terms[1].second;
|
||||
blockMatrix(2) = terms[2].second;
|
||||
|
@ -191,22 +189,25 @@ Key keyX(10), keyY(8), keyZ(12);
|
|||
double sigma1 = 0.1;
|
||||
Matrix A11 = I_2x2;
|
||||
Vector2 b1(2, -1);
|
||||
JacobianFactor factor1(keyX, A11, b1, noiseModel::Isotropic::Sigma(2, sigma1));
|
||||
auto factor1 = boost::make_shared<JacobianFactor>(
|
||||
keyX, A11, b1, noiseModel::Isotropic::Sigma(2, sigma1));
|
||||
|
||||
double sigma2 = 0.5;
|
||||
Matrix A21 = -2 * I_2x2;
|
||||
Matrix A22 = 3 * I_2x2;
|
||||
Vector2 b2(4, -5);
|
||||
JacobianFactor factor2(keyX, A21, keyY, A22, b2, noiseModel::Isotropic::Sigma(2, sigma2));
|
||||
auto factor2 = boost::make_shared<JacobianFactor>(
|
||||
keyX, A21, keyY, A22, b2, noiseModel::Isotropic::Sigma(2, sigma2));
|
||||
|
||||
double sigma3 = 1.0;
|
||||
Matrix A32 = -4 * I_2x2;
|
||||
Matrix A33 = 5 * I_2x2;
|
||||
Vector2 b3(3, -6);
|
||||
JacobianFactor factor3(keyY, A32, keyZ, A33, b3, noiseModel::Isotropic::Sigma(2, sigma3));
|
||||
auto factor3 = boost::make_shared<JacobianFactor>(
|
||||
keyY, A32, keyZ, A33, b3, noiseModel::Isotropic::Sigma(2, sigma3));
|
||||
|
||||
GaussianFactorGraph factors(list_of(factor1)(factor2)(factor3));
|
||||
Ordering ordering(list_of(keyX)(keyY)(keyZ));
|
||||
const GaussianFactorGraph factors { factor1, factor2, factor3 };
|
||||
const Ordering ordering { keyX, keyY, keyZ };
|
||||
}
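Ordering is the change this PR is named for: the removed list_of chains become brace initialization, and the resulting ordering is passed to elimination exactly as before. A hedged sketch, assuming the Ordering initializer-list constructor these hunks rely on; the keys are illustrative:

#include <gtsam/inference/Ordering.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>

using namespace gtsam;

GaussianBayesNet::shared_ptr orderingInitSketch(const GaussianFactorGraph& graph) {
  // Brace initialization replaces Ordering(list_of(0)(1)(2)).
  const Ordering ordering{0, 1, 2};
  return graph.eliminateSequential(ordering);
}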
/* ************************************************************************* */
|
||||
|
@ -436,11 +437,11 @@ TEST(JacobianFactor, eliminate)
|
|||
Vector9 sigmas; sigmas << s1, s0, s2;
|
||||
|
||||
JacobianFactor combinedFactor(0, A0, 1, A1, b, noiseModel::Diagonal::Sigmas(sigmas, true));
|
||||
GaussianFactorGraph::EliminationResult expected = combinedFactor.eliminate(list_of(0));
|
||||
GaussianFactorGraph::EliminationResult expected = combinedFactor.eliminate(Ordering{0});
|
||||
JacobianFactor::shared_ptr expectedJacobian = boost::dynamic_pointer_cast<
|
||||
JacobianFactor>(expected.second);
|
||||
|
||||
GaussianFactorGraph::EliminationResult actual = EliminateQR(gfg, list_of(0));
|
||||
GaussianFactorGraph::EliminationResult actual = EliminateQR(gfg, Ordering{0});
|
||||
JacobianFactor::shared_ptr actualJacobian = boost::dynamic_pointer_cast<
|
||||
JacobianFactor>(actual.second);
|
||||
|
||||
|
@ -487,7 +488,7 @@ TEST(JacobianFactor, eliminate2 )
|
|||
|
||||
// eliminate the combined factor
|
||||
pair<GaussianConditional::shared_ptr, JacobianFactor::shared_ptr>
|
||||
actual = combined.eliminate(Ordering(list_of(2)));
|
||||
actual = combined.eliminate(Ordering{2});
|
||||
|
||||
// create expected Conditional Gaussian
|
||||
double oldSigma = 0.0894427; // from when R was made unit
|
||||
|
@ -539,11 +540,11 @@ TEST(JacobianFactor, EliminateQR)
|
|||
// Create factor graph
|
||||
const SharedDiagonal sig_4D = noiseModel::Isotropic::Sigma(4, 0.5);
|
||||
const SharedDiagonal sig_2D = noiseModel::Isotropic::Sigma(2, 0.5);
|
||||
GaussianFactorGraph factors = list_of
|
||||
(JacobianFactor(list_of(3)(5)(7)(9)(11), VerticalBlockMatrix(list_of(2)(2)(2)(2)(2)(1), Ab.block(0, 0, 4, 11)), sig_4D))
|
||||
(JacobianFactor(list_of(5)(7)(9)(11), VerticalBlockMatrix(list_of(2)(2)(2)(2)(1), Ab.block(4, 2, 4, 9)), sig_4D))
|
||||
(JacobianFactor(list_of(7)(9)(11), VerticalBlockMatrix(list_of(2)(2)(2)(1), Ab.block(8, 4, 4, 7)), sig_4D))
|
||||
(JacobianFactor(list_of(11), VerticalBlockMatrix(list_of(2)(1), Ab.block(12, 8, 2, 3)), sig_2D));
|
||||
GaussianFactorGraph factors = {
|
||||
boost::make_shared<JacobianFactor>(KeyVector{3, 5, 7, 9, 11}, VerticalBlockMatrix(Dims{2, 2, 2, 2, 2, 1}, Ab.block(0, 0, 4, 11)), sig_4D),
|
||||
boost::make_shared<JacobianFactor>(KeyVector{5, 7, 9, 11}, VerticalBlockMatrix(Dims{2, 2, 2, 2, 1}, Ab.block(4, 2, 4, 9)), sig_4D),
|
||||
boost::make_shared<JacobianFactor>(KeyVector{7, 9, 11}, VerticalBlockMatrix(Dims{2, 2, 2, 1}, Ab.block(8, 4, 4, 7)), sig_4D),
|
||||
boost::make_shared<JacobianFactor>(KeyVector{11}, VerticalBlockMatrix(Dims{2, 1}, Ab.block(12, 8, 2, 3)), sig_2D)};
|
||||
|
||||
// extract the dense matrix for the graph
|
||||
Matrix actualDense = factors.augmentedJacobian();
|
||||
|
@ -562,12 +563,12 @@ TEST(JacobianFactor, EliminateQR)
|
|||
0., 0., 0., 0., 0., 0., 0., 0., 8.2503, 3.3757, 6.8476,
|
||||
0., 0., 0., 0., 0., 0., 0., 0., 0., -5.7095, -0.0090).finished();
|
||||
|
||||
GaussianConditional expectedFragment(list_of(3)(5)(7)(9)(11), 3,
|
||||
VerticalBlockMatrix(list_of(2)(2)(2)(2)(2)(1), R.topRows(6)),
|
||||
GaussianConditional expectedFragment(KeyVector{3,5,7,9,11}, 3,
|
||||
VerticalBlockMatrix(Dims{2, 2, 2, 2, 2, 1}, R.topRows(6)),
|
||||
noiseModel::Unit::Create(6));
|
||||
|
||||
// Eliminate (3 frontal variables, 6 scalar columns) using QR !!!!
|
||||
GaussianFactorGraph::EliminationResult actual = EliminateQR(factors, list_of(3)(5)(7));
|
||||
GaussianFactorGraph::EliminationResult actual = EliminateQR(factors, Ordering{3, 5, 7});
|
||||
const JacobianFactor &actualJF = dynamic_cast<const JacobianFactor&>(*actual.second);
|
||||
|
||||
EXPECT(assert_equal(expectedFragment, *actual.first, 0.001));
|
||||
|
@ -589,7 +590,7 @@ TEST ( JacobianFactor, constraint_eliminate1 )
|
|||
|
||||
// eliminate it
|
||||
pair<GaussianConditional::shared_ptr, JacobianFactor::shared_ptr>
|
||||
actual = lc.eliminate(list_of(1));
|
||||
actual = lc.eliminate(Ordering{1});
|
||||
|
||||
// verify linear factor is a null ptr
|
||||
EXPECT(actual.second->empty());
|
||||
|
@ -617,7 +618,7 @@ TEST ( JacobianFactor, constraint_eliminate2 )
|
|||
|
||||
// eliminate x and verify results
|
||||
pair<GaussianConditional::shared_ptr, JacobianFactor::shared_ptr>
|
||||
actual = lc.eliminate(list_of(1));
|
||||
actual = lc.eliminate(Ordering{1});
|
||||
|
||||
// LF should be empty
|
||||
// It's tricky to create Eigen matrices that are only zero along one dimension
|
||||
|
@ -648,7 +649,7 @@ TEST(JacobianFactor, OverdeterminedEliminate) {
|
|||
SharedDiagonal diagonal = noiseModel::Diagonal::Sigmas(sigmas);
|
||||
|
||||
JacobianFactor factor(0, Ab.leftCols(3), Ab.col(3), diagonal);
|
||||
GaussianFactorGraph::EliminationResult actual = factor.eliminate(list_of(0));
|
||||
GaussianFactorGraph::EliminationResult actual = factor.eliminate(Ordering{0});
|
||||
|
||||
Matrix expectedRd(3, 4);
|
||||
expectedRd << -2.64575131, -2.64575131, -2.64575131, -2.64575131, //
|
||||
|
|
|
@ -22,20 +22,17 @@
|
|||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
|
||||
#include <iostream>
|
||||
#include <limits>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
using namespace noiseModel;
|
||||
using namespace boost::assign;
|
||||
|
||||
static const double kSigma = 2, kInverseSigma = 1.0 / kSigma,
|
||||
kVariance = kSigma * kSigma, prc = 1.0 / kVariance;
|
||||
static const Matrix R = Matrix3::Identity() * kInverseSigma;
|
||||
static const Matrix kCovariance = Matrix3::Identity() * kVariance;
|
||||
static const Matrix R = I_3x3 * kInverseSigma;
|
||||
static const Matrix kCovariance = I_3x3 * kVariance;
|
||||
static const Vector3 kSigmas(kSigma, kSigma, kSigma);
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
@ -723,7 +720,7 @@ TEST(NoiseModel, NonDiagonalGaussian)
|
|||
const Matrix3 info = R.transpose() * R;
|
||||
const Matrix3 cov = info.inverse();
|
||||
const Vector3 e(1, 1, 1), white = R * e;
|
||||
Matrix I = Matrix3::Identity();
|
||||
Matrix I = I_3x3;
|
||||
|
||||
|
||||
{
|
||||
|
|
|
@ -21,13 +21,8 @@
|
|||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
#include <boost/assign/std/map.hpp>
|
||||
#include <boost/assign/list_of.hpp>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
using namespace boost::assign;
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(RegularHessianFactor, Constructors)
|
||||
|
@ -36,8 +31,7 @@ TEST(RegularHessianFactor, Constructors)
|
|||
// 0.5*|x0 + x1 + x3 - [1;2]|^2 = 0.5*|A*x-b|^2, with A=[I I I]
|
||||
Matrix A1 = I_2x2, A2 = I_2x2, A3 = I_2x2;
|
||||
Vector2 b(1,2);
|
||||
vector<pair<Key, Matrix> > terms;
|
||||
terms += make_pair(0, A1), make_pair(1, A2), make_pair(3, A3);
|
||||
const vector<pair<Key, Matrix> > terms {{0, A1}, {1, A2}, {3, A3}};
|
||||
RegularJacobianFactor<2> jf(terms, b);
|
||||
|
||||
// Test conversion from JacobianFactor
|
||||
|
@ -65,8 +59,8 @@ TEST(RegularHessianFactor, Constructors)
|
|||
|
||||
// Test n-way constructor
|
||||
KeyVector keys {0, 1, 3};
|
||||
vector<Matrix> Gs; Gs += G11, G12, G13, G22, G23, G33;
|
||||
vector<Vector> gs; gs += g1, g2, g3;
|
||||
vector<Matrix> Gs {G11, G12, G13, G22, G23, G33};
|
||||
vector<Vector> gs {g1, g2, g3};
|
||||
RegularHessianFactor<2> factor3(keys, Gs, gs, f);
|
||||
EXPECT(assert_equal(factor, factor3));
|
||||
|
||||
|
@ -82,7 +76,7 @@ TEST(RegularHessianFactor, Constructors)
|
|||
|
||||
// Test constructor from Information matrix
|
||||
Matrix info = factor.augmentedInformation();
|
||||
vector<size_t> dims; dims += 2, 2, 2;
|
||||
vector<size_t> dims {2, 2, 2};
|
||||
SymmetricBlockMatrix sym(dims, info, true);
|
||||
RegularHessianFactor<2> factor6(keys, sym);
|
||||
EXPECT(assert_equal(factor, factor6));
|
||||
|
@ -97,10 +91,7 @@ TEST(RegularHessianFactor, Constructors)
|
|||
Vector Y(6); Y << 9, 12, 9, 12, 9, 12;
|
||||
EXPECT(assert_equal(Y,AtA*X));
|
||||
|
||||
VectorValues x = map_list_of<Key, Vector>
|
||||
(0, Vector2(1,2))
|
||||
(1, Vector2(3,4))
|
||||
(3, Vector2(5,6));
|
||||
VectorValues x{{0, Vector2(1, 2)}, {1, Vector2(3, 4)}, {3, Vector2(5, 6)}};
|
||||
|
||||
VectorValues expected;
|
||||
expected.insert(0, Y.segment<2>(0));
|
||||
|
|
|
@ -24,14 +24,11 @@
|
|||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
#include <boost/assign/list_of.hpp>
|
||||
#include <boost/range/iterator_range.hpp>
|
||||
#include <boost/range/adaptor/map.hpp>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
using namespace boost::assign;
|
||||
|
||||
static const size_t fixedDim = 3;
|
||||
static const size_t nrKeys = 3;
|
||||
|
@ -40,10 +37,8 @@ static const size_t nrKeys = 3;
|
|||
namespace {
|
||||
namespace simple {
|
||||
// Terms we'll use
|
||||
const vector<pair<Key, Matrix> > terms = list_of<pair<Key,Matrix> >
|
||||
(make_pair(0, Matrix3::Identity()))
|
||||
(make_pair(1, 2*Matrix3::Identity()))
|
||||
(make_pair(2, 3*Matrix3::Identity()));
|
||||
const vector<pair<Key, Matrix> > terms{
|
||||
{0, 1 * I_3x3}, {1, 2 * I_3x3}, {2, 3 * I_3x3}};
|
||||
|
||||
// RHS and sigmas
|
||||
const Vector b = (Vector(3) << 1., 2., 3.).finished();
|
||||
|
@ -52,10 +47,8 @@ namespace {
|
|||
|
||||
namespace simple2 {
|
||||
// Terms
|
||||
const vector<pair<Key, Matrix> > terms2 = list_of<pair<Key,Matrix> >
|
||||
(make_pair(0, 2*Matrix3::Identity()))
|
||||
(make_pair(1, 4*Matrix3::Identity()))
|
||||
(make_pair(2, 6*Matrix3::Identity()));
|
||||
const vector<pair<Key, Matrix> > terms2{
|
||||
{0, 2 * I_3x3}, {1, 4 * I_3x3}, {2, 6 * I_3x3}};
|
||||
|
||||
// RHS
|
||||
const Vector b2 = (Vector(3) << 2., 4., 6.).finished();
|
||||
|
|
|
@ -22,9 +22,6 @@
|
|||
#include <gtsam/linear/GaussianISAM.h>
|
||||
#include <gtsam/linear/NoiseModel.h>
|
||||
|
||||
#include <boost/assign/list_of.hpp>
|
||||
using namespace boost::assign;
|
||||
|
||||
#include <gtsam/base/serializationTestHelpers.h>
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
|
@ -228,13 +225,13 @@ TEST(Serialization, gaussian_bayes_net) {
|
|||
/* ************************************************************************* */
|
||||
TEST (Serialization, gaussian_bayes_tree) {
|
||||
const Key x1=1, x2=2, x3=3, x4=4;
|
||||
const Ordering chainOrdering = Ordering(list_of(x2)(x1)(x3)(x4));
|
||||
const Ordering chainOrdering {x2, x1, x3, x4};
|
||||
const SharedDiagonal chainNoise = noiseModel::Isotropic::Sigma(1, 0.5);
|
||||
const GaussianFactorGraph chain = list_of
|
||||
(JacobianFactor(x2, (Matrix(1, 1) << 1.).finished(), x1, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise))
|
||||
(JacobianFactor(x2, (Matrix(1, 1) << 1.).finished(), x3, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise))
|
||||
(JacobianFactor(x3, (Matrix(1, 1) << 1.).finished(), x4, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise))
|
||||
(JacobianFactor(x4, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise));
|
||||
const GaussianFactorGraph chain = {
|
||||
boost::make_shared<JacobianFactor>(x2, (Matrix(1, 1) << 1.).finished(), x1, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise),
|
||||
boost::make_shared<JacobianFactor>(x2, (Matrix(1, 1) << 1.).finished(), x3, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise),
|
||||
boost::make_shared<JacobianFactor>(x3, (Matrix(1, 1) << 1.).finished(), x4, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise),
|
||||
boost::make_shared<JacobianFactor>(x4, (Matrix(1, 1) << 1.).finished(), (Vector(1) << 1.).finished(), chainNoise)};
|
||||
|
||||
GaussianBayesTree init = *chain.eliminateMultifrontal(chainOrdering);
|
||||
GaussianBayesTree expected = *chain.eliminateMultifrontal(chainOrdering);
|
||||
|
|
|
@ -21,9 +21,6 @@
|
|||
#include <gtsam/linear/GaussianFactorGraph.h>
|
||||
#include <gtsam/linear/SparseEigen.h>
|
||||
|
||||
#include <boost/assign/list_of.hpp>
|
||||
using boost::assign::list_of;
|
||||
|
||||
#include <gtsam/base/TestableAssertions.h>
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
|
@ -49,7 +46,7 @@ TEST(SparseEigen, sparseJacobianEigen) {
|
|||
EXPECT(assert_equal(gfg.augmentedJacobian(), Matrix(sparseResult)));
|
||||
|
||||
// Call sparseJacobian with optional ordering...
|
||||
auto ordering = Ordering(list_of(x45)(x123));
|
||||
const Ordering ordering{x45, x123};
|
||||
|
||||
// Eigen Sparse with optional ordering
|
||||
EXPECT(assert_equal(gfg.augmentedJacobian(ordering),
|
||||
|
|
|
@ -21,14 +21,11 @@
|
|||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
#include <boost/assign/list_of.hpp>
|
||||
#include <boost/range/adaptor/map.hpp>
|
||||
|
||||
#include <sstream>
|
||||
|
||||
using namespace std;
|
||||
using namespace boost::assign;
|
||||
using boost::adaptors::map_keys;
|
||||
using namespace gtsam;
|
||||
|
||||
|
|
|
@ -18,11 +18,9 @@
|
|||
#include <gtsam/base/timing.h>
|
||||
#include <gtsam/navigation/ScenarioRunner.h>
|
||||
|
||||
#include <boost/assign.hpp>
|
||||
#include <cmath>
|
||||
|
||||
using namespace std;
|
||||
using namespace boost::assign;
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
|
|
@ -29,12 +29,9 @@
|
|||
#include <gtsam/base/utilities.h> // boost::index_sequence
|
||||
|
||||
#include <boost/serialization/base_object.hpp>
|
||||
#include <boost/assign/list_of.hpp>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
using boost::assign::cref_list_of;
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
||||
/**
|
||||
|
|
|
@ -29,9 +29,6 @@
|
|||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
#include <boost/assign/list_of.hpp>
|
||||
using boost::assign::list_of;
|
||||
using boost::assign::map_list_of;
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
@ -253,8 +250,7 @@ TEST(AdaptAutoDiff, SnavelyExpression) {
|
|||
internal::upAligned(RecordSize) + P.traceSize() + X.traceSize(),
|
||||
expression.traceSize());
|
||||
|
||||
set<Key> expected = list_of(1)(2);
|
||||
|
||||
const set<Key> expected{1, 2};
|
||||
EXPECT(expected == expression.keys());
|
||||
}
|
||||
|
||||
|
|
|
@ -25,10 +25,6 @@
|
|||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
#include <boost/assign/list_of.hpp>
|
||||
using boost::assign::list_of;
|
||||
using boost::assign::map_list_of;
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
|
||||
|
@ -90,7 +86,7 @@ Vector f3(const Point3& p, OptionalJacobian<Eigen::Dynamic, 3> H) {
|
|||
return p;
|
||||
}
|
||||
Point3_ pointExpression(1);
|
||||
set<Key> expected = list_of(1);
|
||||
const set<Key> expected{1};
|
||||
} // namespace unary
|
||||
|
||||
// Create a unary expression that takes another expression as a single argument.
|
||||
|
@ -190,14 +186,14 @@ TEST(Expression, BinaryToDouble) {
|
|||
/* ************************************************************************* */
|
||||
// Check keys of an expression created from class method.
|
||||
TEST(Expression, BinaryKeys) {
|
||||
set<Key> expected = list_of(1)(2);
|
||||
const set<Key> expected{1, 2};
|
||||
EXPECT(expected == binary::p_cam.keys());
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// Check dimensions by calling `dims` method.
|
||||
TEST(Expression, BinaryDimensions) {
|
||||
map<Key, int> actual, expected = map_list_of<Key, int>(1, 6)(2, 3);
|
||||
map<Key, int> actual, expected{{1, 6}, {2, 3}};
|
||||
binary::p_cam.dims(actual);
|
||||
EXPECT(actual == expected);
|
||||
}
|
||||
|
@ -227,14 +223,14 @@ Expression<Point2> uv_hat(uncalibrate<Cal3_S2>, K, projection);
|
|||
/* ************************************************************************* */
|
||||
// keys
|
||||
TEST(Expression, TreeKeys) {
|
||||
set<Key> expected = list_of(1)(2)(3);
|
||||
const set<Key> expected{1, 2, 3};
|
||||
EXPECT(expected == tree::uv_hat.keys());
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// dimensions
|
||||
TEST(Expression, TreeDimensions) {
|
||||
map<Key, int> actual, expected = map_list_of<Key, int>(1, 6)(2, 3)(3, 5);
|
||||
map<Key, int> actual, expected{{1, 6}, {2, 3}, {3, 5}};
|
||||
tree::uv_hat.dims(actual);
|
||||
EXPECT(actual == expected);
|
||||
}
|
||||
|
@ -265,7 +261,7 @@ TEST(Expression, compose1) {
|
|||
Rot3_ R3 = R1 * R2;
|
||||
|
||||
// Check keys
|
||||
set<Key> expected = list_of(1)(2);
|
||||
const set<Key> expected{1, 2};
|
||||
EXPECT(expected == R3.keys());
|
||||
}
|
||||
|
||||
|
@ -277,7 +273,7 @@ TEST(Expression, compose2) {
|
|||
Rot3_ R3 = R1 * R2;
|
||||
|
||||
// Check keys
|
||||
set<Key> expected = list_of(1);
|
||||
const set<Key> expected{1};
|
||||
EXPECT(expected == R3.keys());
|
||||
}
|
||||
|
||||
|
@ -289,7 +285,7 @@ TEST(Expression, compose3) {
|
|||
Rot3_ R3 = R1 * R2;
|
||||
|
||||
// Check keys
|
||||
set<Key> expected = list_of(3);
|
||||
const set<Key> expected{3};
|
||||
EXPECT(expected == R3.keys());
|
||||
}
|
||||
|
||||
|
@ -302,7 +298,7 @@ TEST(Expression, compose4) {
|
|||
Double_ R3 = R1 * R2;
|
||||
|
||||
// Check keys
|
||||
set<Key> expected = list_of(1);
|
||||
const set<Key> expected{1};
|
||||
EXPECT(expected == R3.keys());
|
||||
}
|
||||
|
||||
|
@ -326,7 +322,7 @@ TEST(Expression, ternary) {
|
|||
Rot3_ ABC(composeThree, A, B, C);
|
||||
|
||||
// Check keys
|
||||
set<Key> expected = list_of(1)(2)(3);
|
||||
const set<Key> expected {1, 2, 3};
|
||||
EXPECT(expected == ABC.keys());
|
||||
}
|
||||
|
||||
|
@ -336,10 +332,10 @@ TEST(Expression, ScalarMultiply) {
|
|||
const Key key(67);
|
||||
const Point3_ expr = 23 * Point3_(key);
|
||||
|
||||
set<Key> expected_keys = list_of(key);
|
||||
const set<Key> expected_keys{key};
|
||||
EXPECT(expected_keys == expr.keys());
|
||||
|
||||
map<Key, int> actual_dims, expected_dims = map_list_of<Key, int>(key, 3);
|
||||
map<Key, int> actual_dims, expected_dims {{key, 3}};
|
||||
expr.dims(actual_dims);
|
||||
EXPECT(actual_dims == expected_dims);
|
||||
|
||||
|
@ -367,10 +363,10 @@ TEST(Expression, BinarySum) {
|
|||
const Key key(67);
|
||||
const Point3_ sum_ = Point3_(key) + Point3_(Point3(1, 1, 1));
|
||||
|
||||
set<Key> expected_keys = list_of(key);
|
||||
const set<Key> expected_keys{key};
|
||||
EXPECT(expected_keys == sum_.keys());
|
||||
|
||||
map<Key, int> actual_dims, expected_dims = map_list_of<Key, int>(key, 3);
|
||||
map<Key, int> actual_dims, expected_dims {{key, 3}};
|
||||
sum_.dims(actual_dims);
|
||||
EXPECT(actual_dims == expected_dims);
|
||||
|
||||
|
@ -462,7 +458,7 @@ TEST(Expression, UnaryOfSum) {
|
|||
const Point3_ sum_ = Point3_(key1) + Point3_(key2);
|
||||
const Double_ norm_(&gtsam::norm3, sum_);
|
||||
|
||||
map<Key, int> actual_dims, expected_dims = map_list_of<Key, int>(key1, 3)(key2, 3);
|
||||
map<Key, int> actual_dims, expected_dims = {{key1, 3}, {key2, 3}};
|
||||
norm_.dims(actual_dims);
|
||||
EXPECT(actual_dims == expected_dims);
|
||||
|
||||
|
@ -485,7 +481,7 @@ TEST(Expression, WeightedSum) {
|
|||
const Key key1(42), key2(67);
|
||||
const Point3_ weighted_sum_ = 17 * Point3_(key1) + 23 * Point3_(key2);
|
||||
|
||||
map<Key, int> actual_dims, expected_dims = map_list_of<Key, int>(key1, 3)(key2, 3);
|
||||
map<Key, int> actual_dims, expected_dims {{key1, 3}, {key2, 3}};
|
||||
weighted_sum_.dims(actual_dims);
|
||||
EXPECT(actual_dims == expected_dims);
|
||||
|
||||
|
|
|
@ -15,10 +15,7 @@
|
|||
#include <gtsam/nonlinear/LinearContainerFactor.h>
|
||||
#include <gtsam/slam/BetweenFactor.h>
|
||||
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
|
||||
using namespace std;
|
||||
using namespace boost::assign;
|
||||
using namespace gtsam;
|
||||
|
||||
const gtsam::noiseModel::Diagonal::shared_ptr diag_model2 = noiseModel::Diagonal::Sigmas(Vector2(1.0, 1.0));
|
||||
|
|
|
@ -25,15 +25,11 @@
|
|||
#include <gtsam/base/TestableAssertions.h>
|
||||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
#include <boost/assign/std/list.hpp> // for operator +=
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
#include <boost/assign/list_of.hpp>
|
||||
#include <boost/bind/bind.hpp>
|
||||
#include <stdexcept>
|
||||
#include <limits>
|
||||
#include <type_traits>
|
||||
|
||||
using namespace boost::assign;
|
||||
using namespace std::placeholders;
|
||||
using namespace gtsam;
|
||||
using namespace std;
|
||||
|
@ -224,9 +220,8 @@ TEST(Values, retract_full)
|
|||
config0.insert(key1, Vector3(1.0, 2.0, 3.0));
|
||||
config0.insert(key2, Vector3(5.0, 6.0, 7.0));
|
||||
|
||||
VectorValues delta = pair_list_of<Key, Vector>
|
||||
(key1, Vector3(1.0, 1.1, 1.2))
|
||||
(key2, Vector3(1.3, 1.4, 1.5));
|
||||
VectorValues delta {{key1, Vector3(1.0, 1.1, 1.2)},
|
||||
{key2, Vector3(1.3, 1.4, 1.5)}};
|
||||
|
||||
Values expected;
|
||||
expected.insert(key1, Vector3(2.0, 3.1, 4.2));
|
||||
|
@ -243,8 +238,7 @@ TEST(Values, retract_partial)
|
|||
config0.insert(key1, Vector3(1.0, 2.0, 3.0));
|
||||
config0.insert(key2, Vector3(5.0, 6.0, 7.0));
|
||||
|
||||
VectorValues delta = pair_list_of<Key, Vector>
|
||||
(key2, Vector3(1.3, 1.4, 1.5));
|
||||
VectorValues delta {{key2, Vector3(1.3, 1.4, 1.5)}};
|
||||
|
||||
Values expected;
|
||||
expected.insert(key1, Vector3(1.0, 2.0, 3.0));
|
||||
|
@ -279,9 +273,8 @@ TEST(Values, localCoordinates)
|
|||
valuesA.insert(key1, Vector3(1.0, 2.0, 3.0));
|
||||
valuesA.insert(key2, Vector3(5.0, 6.0, 7.0));
|
||||
|
||||
VectorValues expDelta = pair_list_of<Key, Vector>
|
||||
(key1, Vector3(0.1, 0.2, 0.3))
|
||||
(key2, Vector3(0.4, 0.5, 0.6));
|
||||
VectorValues expDelta{{key1, Vector3(0.1, 0.2, 0.3)},
|
||||
{key2, Vector3(0.4, 0.5, 0.6)}};
|
||||
|
||||
Values valuesB = valuesA.retract(expDelta);
|
||||
|
||||
|
|
|
@ -18,8 +18,6 @@
|
|||
#include <gtsam/base/Testable.h>
|
||||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
#include <boost/assign/std/list.hpp> // for operator +=
|
||||
using namespace boost::assign;
|
||||
|
||||
using namespace gtsam;
|
||||
using namespace std;
|
||||
|
|
|
@ -22,8 +22,6 @@
|
|||
#include <gtsam/linear/JacobianFactor.h>
|
||||
#include <gtsam/nonlinear/NonlinearFactor.h>
|
||||
|
||||
#include <boost/assign/list_of.hpp>
|
||||
|
||||
namespace gtsam {
|
||||
/**
|
||||
* The ShonanGaugeFactor creates a constraint on a single SO(n) to avoid moving
|
||||
|
@ -59,7 +57,7 @@ public:
|
|||
*/
|
||||
ShonanGaugeFactor(Key key, size_t p, size_t d = 3,
|
||||
boost::optional<double> gamma = boost::none)
|
||||
: NonlinearFactor(boost::assign::cref_list_of<1>(key)) {
|
||||
: NonlinearFactor(KeyVector{key}) {
|
||||
if (p < d) {
|
||||
throw std::invalid_argument("ShonanGaugeFactor must have p>=d.");
|
||||
}
|
||||
|
|
|
@ -41,7 +41,6 @@
|
|||
#include <gtsam/base/Vector.h>
|
||||
#include <gtsam/base/types.h>
|
||||
|
||||
#include <boost/assign/list_inserter.hpp>
|
||||
#include <boost/filesystem/operations.hpp>
|
||||
#include <boost/filesystem/path.hpp>
|
||||
#include <boost/optional.hpp>
|
||||
|
|
|
@ -28,10 +28,8 @@
|
|||
#include <gtsam/inference/Symbol.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
#include <boost/shared_ptr.hpp>
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
using namespace boost::assign;
|
||||
|
||||
#include <iostream>
|
||||
#include <vector>
|
||||
|
@ -456,8 +454,8 @@ TEST(GeneralSFMFactor, BinaryJacobianFactor) {
|
|||
using namespace noiseModel;
|
||||
Rot2 R = Rot2::fromAngle(0.3);
|
||||
Matrix2 cov = R.matrix() * R.matrix().transpose();
|
||||
models += SharedNoiseModel(), Unit::Create(2), //
|
||||
Isotropic::Sigma(2, 0.5), Constrained::All(2), Gaussian::Covariance(cov);
|
||||
models = {SharedNoiseModel(), Unit::Create(2), Isotropic::Sigma(2, 0.5),
|
||||
Constrained::All(2), Gaussian::Covariance(cov)};
|
||||
}
|
||||
|
||||
// Now loop over all these noise models
|
||||
|
|
|
@ -29,7 +29,6 @@
|
|||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
using namespace boost::assign;
|
||||
|
||||
static Symbol x0('x', 0), x1('x', 1), x2('x', 2), x3('x', 3);
|
||||
static SharedNoiseModel model(noiseModel::Isotropic::Sigma(6, 0.1));
|
||||
|
@ -156,7 +155,7 @@ TEST( InitializePose3, orientationsGradientSymbolicGraph ) {
|
|||
/* *************************************************************************** */
|
||||
TEST( InitializePose3, singleGradient ) {
|
||||
Rot3 R1 = Rot3();
|
||||
Matrix M = Matrix3::Zero();
|
||||
Matrix M = Z_3x3;
|
||||
M(0,1) = -1; M(1,0) = 1; M(2,2) = 1;
|
||||
Rot3 R2 = Rot3(M);
|
||||
double a = 6.010534238540223;
|
||||
|
|
|
@ -31,7 +31,6 @@
|
|||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
using namespace boost::assign;
|
||||
|
||||
static Symbol x0('x', 0), x1('x', 1), x2('x', 2), x3('x', 3);
|
||||
static SharedNoiseModel model(noiseModel::Isotropic::Sigma(3, 0.1));
|
||||
|
|
|
@ -25,10 +25,6 @@
|
|||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
#include <boost/assign/std.hpp>
|
||||
|
||||
using namespace boost::assign;
|
||||
using namespace std::placeholders;
|
||||
using namespace gtsam;
|
||||
using namespace std;
|
||||
|
|
|
@ -27,14 +27,11 @@
|
|||
#include <gtsam/linear/GaussianFactor.h>
|
||||
#include <gtsam/base/timing.h>
|
||||
|
||||
#include <boost/assign/list_of.hpp>
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
#include <boost/range/iterator_range.hpp>
|
||||
#include <boost/range/adaptor/map.hpp>
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
using namespace std;
|
||||
using namespace boost::assign;
|
||||
using namespace gtsam;
|
||||
|
||||
// F
|
||||
|
@ -68,17 +65,15 @@ TEST( regularImplicitSchurFactor, addHessianMultiply ) {
|
|||
Matrix3 P = (E.transpose() * E).inverse();
|
||||
|
||||
double alpha = 0.5;
|
||||
VectorValues xvalues = map_list_of //
|
||||
(0, Vector::Constant(6, 2))//
|
||||
(1, Vector::Constant(6, 4))//
|
||||
(2, Vector::Constant(6, 0))// distractor
|
||||
(3, Vector::Constant(6, 8));
|
||||
VectorValues xvalues{{0, Vector::Constant(6, 2)}, //
|
||||
{1, Vector::Constant(6, 4)}, //
|
||||
{2, Vector::Constant(6, 0)}, // distractor
|
||||
{3, Vector::Constant(6, 8)}};
|
||||
|
||||
VectorValues yExpected = map_list_of//
|
||||
(0, Vector::Constant(6, 27))//
|
||||
(1, Vector::Constant(6, -40))//
|
||||
(2, Vector::Constant(6, 0))// distractor
|
||||
(3, Vector::Constant(6, 279));
|
||||
VectorValues yExpected{{0, Vector::Constant(6, 27)}, //
|
||||
{1, Vector::Constant(6, -40)}, //
|
||||
{2, Vector::Constant(6, 0)}, // distractor
|
||||
{3, Vector::Constant(6, 279)}};
|
||||
|
||||
// Create full F
|
||||
size_t M=4, m = 3, d = 6;
|
||||
|
|
|
@ -12,11 +12,9 @@
|
|||
#include <gtsam/base/numericalDerivative.h>
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
#include <vector>
|
||||
|
||||
using namespace std;
|
||||
using namespace boost::assign;
|
||||
using namespace std::placeholders;
|
||||
using namespace gtsam;
|
||||
|
||||
|
|
|
@ -25,7 +25,6 @@
|
|||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
#include <boost/assign/std/map.hpp>
|
||||
#include <iostream>
|
||||
|
||||
namespace {
|
||||
|
|
|
@ -23,11 +23,8 @@
|
|||
#include <gtsam/slam/SmartProjectionFactor.h>
|
||||
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
#include <boost/assign/std/map.hpp>
|
||||
#include <iostream>
|
||||
|
||||
using namespace boost::assign;
|
||||
|
||||
namespace {
|
||||
static const bool isDebugTest = false;
|
||||
static const Symbol l1('l', 1), l2('l', 2), l3('l', 3);
|
||||
|
@ -802,9 +799,9 @@ TEST(SmartProjectionFactor, implicitJacobianFactor ) {
|
|||
Implicit9& implicitSchurFactor =
|
||||
dynamic_cast<Implicit9&>(*gaussianImplicitSchurFactor);
|
||||
|
||||
VectorValues x = map_list_of(c1,
|
||||
(Vector(9) << 1, 2, 3, 4, 5, 6, 7, 8, 9).finished())(c2,
|
||||
(Vector(9) << 11, 12, 13, 14, 15, 16, 17, 18, 19).finished());
|
||||
VectorValues x{
|
||||
{c1, (Vector(9) << 1, 2, 3, 4, 5, 6, 7, 8, 9).finished()},
|
||||
{c2, (Vector(9) << 11, 12, 13, 14, 15, 16, 17, 18, 19).finished()}};
|
||||
|
||||
VectorValues yExpected, yActual;
|
||||
double alpha = 1.0;
|
||||
|
|
|
@ -25,10 +25,8 @@
|
|||
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
|
||||
#include <gtsam/base/numericalDerivative.h>
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
#include <boost/assign/std/map.hpp>
|
||||
#include <iostream>
|
||||
|
||||
using namespace boost::assign;
|
||||
using namespace std::placeholders;
|
||||
|
||||
namespace {
|
||||
|
@ -437,7 +435,7 @@ TEST( SmartProjectionPoseFactor, Factors ) {
|
|||
E(2, 0) = 10;
|
||||
E(2, 2) = 1;
|
||||
E(3, 1) = 10;
|
||||
SmartFactor::FBlocks Fs = list_of<Matrix>(F1)(F2);
|
||||
SmartFactor::FBlocks Fs {F1, F2};
|
||||
Vector b(4);
|
||||
b.setZero();
|
||||
|
||||
|
|
|
@ -25,13 +25,11 @@
|
|||
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
|
||||
#include <gtsam/slam/PoseTranslationPrior.h>
|
||||
|
||||
#include <boost/assign/std/map.hpp>
|
||||
#include <iostream>
|
||||
|
||||
#include "smartFactorScenarios.h"
|
||||
#define DISABLE_TIMING
|
||||
|
||||
using namespace boost::assign;
|
||||
using namespace std::placeholders;
|
||||
|
||||
static const double rankTol = 1.0;
|
||||
|
|
|
@ -25,13 +25,10 @@
|
|||
#include <gtsam/base/numericalDerivative.h>
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
#include <boost/assign.hpp>
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
#include <boost/bind/bind.hpp>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
using namespace boost::assign;
|
||||
using namespace std::placeholders;
|
||||
|
||||
// Some common constants
|
||||
|
|
|
@ -60,6 +60,29 @@ namespace gtsam {
explicit SymbolicBayesNet(const FactorGraph<DERIVEDCONDITIONAL>& graph)
: Base(graph) {}

/**
* Constructor that takes an initializer list of shared pointers.
* SymbolicBayesNet bn = {make_shared<SymbolicConditional>(), ...};
*/
SymbolicBayesNet(std::initializer_list<boost::shared_ptr<SymbolicConditional>> conditionals)
: Base(conditionals) {}

/// Construct from a single conditional
SymbolicBayesNet(SymbolicConditional&& c) {
push_back(boost::make_shared<SymbolicConditional>(c));
}

/**
* @brief Add a single conditional and return a reference.
* This allows for chaining, e.g.,
*   SymbolicBayesNet bn =
*     SymbolicBayesNet(SymbolicConditional(...))(SymbolicConditional(...));
*/
SymbolicBayesNet& operator()(SymbolicConditional&& c) {
push_back(boost::make_shared<SymbolicConditional>(c));
return *this;
}

/// Destructor
virtual ~SymbolicBayesNet() {}
|
||||
|
||||
|
|
|
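
A short usage sketch of the two construction paths added here (conditionals are placeholders):

  // Initializer list of shared conditionals, or chaining of temporaries;
  // both yield a net describing P(0 | 1) P(1).
  SymbolicBayesNet bn1 = {boost::make_shared<SymbolicConditional>(0, 1),
                          boost::make_shared<SymbolicConditional>(1)};
  SymbolicBayesNet bn2 =
      SymbolicBayesNet(SymbolicConditional(0, 1))(SymbolicConditional(1));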

@ -22,7 +22,6 @@
#include <gtsam/base/Testable.h>

#include <boost/shared_ptr.hpp>
#include <boost/assign/list_of.hpp>
#include <boost/make_shared.hpp>

#include <utility>

@ -55,27 +54,27 @@ namespace gtsam {

/** Construct unary factor */
explicit SymbolicFactor(Key j) :
Base(boost::assign::cref_list_of<1>(j)) {}
Base(KeyVector{j}) {}

/** Construct binary factor */
SymbolicFactor(Key j1, Key j2) :
Base(boost::assign::cref_list_of<2>(j1)(j2)) {}
Base(KeyVector{j1, j2}) {}

/** Construct ternary factor */
SymbolicFactor(Key j1, Key j2, Key j3) :
Base(boost::assign::cref_list_of<3>(j1)(j2)(j3)) {}
Base(KeyVector{j1, j2, j3}) {}

/** Construct 4-way factor */
SymbolicFactor(Key j1, Key j2, Key j3, Key j4) :
Base(boost::assign::cref_list_of<4>(j1)(j2)(j3)(j4)) {}
Base(KeyVector{j1, j2, j3, j4}) {}

/** Construct 5-way factor */
SymbolicFactor(Key j1, Key j2, Key j3, Key j4, Key j5) :
Base(boost::assign::cref_list_of<5>(j1)(j2)(j3)(j4)(j5)) {}
Base(KeyVector{j1, j2, j3, j4, j5}) {}

/** Construct 6-way factor */
SymbolicFactor(Key j1, Key j2, Key j3, Key j4, Key j5, Key j6) :
Base(boost::assign::cref_list_of<6>(j1)(j2)(j3)(j4)(j5)(j6)) {}
Base(KeyVector{j1, j2, j3, j4, j5, j6}) {}

/** Create symbolic version of any factor */
explicit SymbolicFactor(const Factor& factor) : Base(factor.keys()) {}

@ -125,15 +124,15 @@ namespace gtsam {
return result;
}

/** Constructor from a collection of keys - compatible with boost::assign::list_of and
* boost::assign::cref_list_of */
/** Constructor from a collection of keys - compatible with boost assign::list_of and
* boost assign::cref_list_of */
template<class CONTAINER>
static SymbolicFactor FromKeys(const CONTAINER& keys) {
return SymbolicFactor(Base::FromKeys(keys));
}

/** Constructor from a collection of keys - compatible with boost::assign::list_of and
* boost::assign::cref_list_of */
/** Constructor from a collection of keys - compatible with boost assign::list_of and
* boost assign::cref_list_of */
template<class CONTAINER>
static SymbolicFactor::shared_ptr FromKeysShared(const CONTAINER& keys) {
return FromIteratorsShared(keys.begin(), keys.end());
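
The constructors above only change how the key container is built; a brief sketch of the call sites they serve (keys illustrative):

  SymbolicFactor unary(7);          // forwards KeyVector{7} to Base
  SymbolicFactor ternary(0, 1, 2);  // forwards KeyVector{0, 1, 2}
  auto shared = SymbolicFactor::FromKeysShared(KeyVector{3, 4});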

@ -87,6 +87,30 @@ namespace gtsam {
template<class DERIVEDFACTOR>
SymbolicFactorGraph(const FactorGraph<DERIVEDFACTOR>& graph) : Base(graph) {}

/**
* Constructor that takes an initializer list of shared pointers.
* FactorGraph fg = {make_shared<MyFactor>(), ...};
*/
SymbolicFactorGraph(
std::initializer_list<boost::shared_ptr<SymbolicFactor>> sharedFactors)
: Base(sharedFactors) {}

/// Construct from a single factor
SymbolicFactorGraph(SymbolicFactor&& c) {
push_back(boost::make_shared<SymbolicFactor>(c));
}

/**
* @brief Add a single factor and return a reference.
* This allows for chaining, e.g.,
* SymbolicFactorGraph bn =
*     SymbolicFactorGraph(SymbolicFactor(...))(SymbolicFactor(...));
*/
SymbolicFactorGraph& operator()(SymbolicFactor&& c) {
push_back(boost::make_shared<SymbolicFactor>(c));
return *this;
}

/// Destructor
virtual ~SymbolicFactorGraph() {}
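
And the analogous sketch for the factor-graph side (keys illustrative):

  SymbolicFactorGraph fg = {boost::make_shared<SymbolicFactor>(0, 1),
                            boost::make_shared<SymbolicFactor>(1, 2)};
  fg.emplace_shared<SymbolicFactor>(2, 3);  // in-place construction also appends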

@ -1,6 +1,6 @@
/* ----------------------------------------------------------------------------

* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* GTSAM Copyright 2010-2023, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)

@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */

/*
* @file symbolicExampleGraphs.cpp
* @file symbolicExampleGraphs.h
* @date sept 15, 2012
* @author Frank Dellaert
* @author Michael Kaess

@ -20,58 +20,74 @@

#pragma once

#include <gtsam/base/FastDefaultAllocator.h>
#include <gtsam/symbolic/SymbolicFactorGraph.h>
#include <gtsam/symbolic/SymbolicFactor.h>
#include <gtsam/symbolic/SymbolicConditional.h>
#include <gtsam/symbolic/SymbolicBayesTree.h>
#include <gtsam/inference/Ordering.h>
#include <gtsam/inference/Symbol.h>
#include <boost/assign/list_of.hpp>

namespace gtsam {
namespace {

const SymbolicFactorGraph simpleTestGraph1 = boost::assign::list_of
(boost::make_shared<SymbolicFactor>(0,1))
(boost::make_shared<SymbolicFactor>(0,2))
(boost::make_shared<SymbolicFactor>(1,4))
(boost::make_shared<SymbolicFactor>(2,4))
(boost::make_shared<SymbolicFactor>(3,4));
// A small helper class to replace Boost's `list_of` function.
template <typename T>
struct ChainedVector {
using Result = std::vector<T, typename internal::FastDefaultAllocator<T>::type>;
Result result;

const SymbolicBayesNet simpleTestGraph1BayesNet = boost::assign::list_of
(boost::make_shared<SymbolicConditional>(0,1,2))
(boost::make_shared<SymbolicConditional>(1,2,4))
(boost::make_shared<SymbolicConditional>(2,4))
(boost::make_shared<SymbolicConditional>(3,4))
(boost::make_shared<SymbolicConditional>(4));
ChainedVector(const T& c) { result.push_back(c); }

const SymbolicFactorGraph simpleTestGraph2 = boost::assign::list_of
(boost::make_shared<SymbolicFactor>(0,1))
(boost::make_shared<SymbolicFactor>(0,2))
(boost::make_shared<SymbolicFactor>(1,3))
(boost::make_shared<SymbolicFactor>(1,4))
(boost::make_shared<SymbolicFactor>(2,3))
(boost::make_shared<SymbolicFactor>(4,5));
ChainedVector& operator()(const T& c) {
result.push_back(c);
return *this;
}

operator Result() { return result; }
};

const SymbolicFactorGraph simpleTestGraph1 {
boost::make_shared<SymbolicFactor>(0,1),
boost::make_shared<SymbolicFactor>(0,2),
boost::make_shared<SymbolicFactor>(1,4),
boost::make_shared<SymbolicFactor>(2,4),
boost::make_shared<SymbolicFactor>(3,4)};

const SymbolicBayesNet simpleTestGraph1BayesNet {
boost::make_shared<SymbolicConditional>(0,1,2),
boost::make_shared<SymbolicConditional>(1,2,4),
boost::make_shared<SymbolicConditional>(2,4),
boost::make_shared<SymbolicConditional>(3,4),
boost::make_shared<SymbolicConditional>(4)};

const SymbolicFactorGraph simpleTestGraph2 {
boost::make_shared<SymbolicFactor>(0,1),
boost::make_shared<SymbolicFactor>(0,2),
boost::make_shared<SymbolicFactor>(1,3),
boost::make_shared<SymbolicFactor>(1,4),
boost::make_shared<SymbolicFactor>(2,3),
boost::make_shared<SymbolicFactor>(4,5)};

/** 1 - 0 - 2 - 3 */
const SymbolicFactorGraph simpleChain = boost::assign::list_of
(boost::make_shared<SymbolicFactor>(1,0))
(boost::make_shared<SymbolicFactor>(0,2))
(boost::make_shared<SymbolicFactor>(2,3));
const SymbolicFactorGraph simpleChain {
boost::make_shared<SymbolicFactor>(1,0),
boost::make_shared<SymbolicFactor>(0,2),
boost::make_shared<SymbolicFactor>(2,3)};

/* ************************************************************************* *
* 2 3
* 0 1 : 2
****************************************************************************/
* 2 3
* 0 1 : 2
****************************************************************************/
SymbolicBayesTree __simpleChainBayesTree() {
SymbolicBayesTree result;
result.insertRoot(boost::make_shared<SymbolicBayesTreeClique>(
boost::make_shared<SymbolicConditional>(
SymbolicConditional::FromKeys(boost::assign::list_of(2)(3), 2))));
boost::make_shared<SymbolicConditional>(
SymbolicConditional::FromKeys(KeyVector{2,3}, 2))));
result.addClique(boost::make_shared<SymbolicBayesTreeClique>(
boost::make_shared<SymbolicConditional>(
SymbolicConditional::FromKeys(boost::assign::list_of(0)(1)(2), 2))),
result.roots().front());
boost::make_shared<SymbolicConditional>(
SymbolicConditional::FromKeys(KeyVector{0,1,2}, 2))),
result.roots().front());
return result;
}

@ -79,52 +95,67 @@ namespace gtsam {

/* ************************************************************************* */
// Keys for ASIA example from the tutorial with A and D evidence
const Key _X_=gtsam::symbol_shorthand::X(0), _T_=gtsam::symbol_shorthand::T(0),
_S_=gtsam::symbol_shorthand::S(0), _E_=gtsam::symbol_shorthand::E(0),
_L_=gtsam::symbol_shorthand::L(0), _B_=gtsam::symbol_shorthand::B(0);
const Key _X_ = symbol_shorthand::X(0),
_T_ = symbol_shorthand::T(0),
_S_ = symbol_shorthand::S(0),
_E_ = symbol_shorthand::E(0),
_L_ = symbol_shorthand::L(0),
_B_ = symbol_shorthand::B(0);

// Factor graph for Asia example
const SymbolicFactorGraph asiaGraph = boost::assign::list_of
(boost::make_shared<SymbolicFactor>(_T_))
(boost::make_shared<SymbolicFactor>(_S_))
(boost::make_shared<SymbolicFactor>(_T_, _E_, _L_))
(boost::make_shared<SymbolicFactor>(_L_, _S_))
(boost::make_shared<SymbolicFactor>(_S_, _B_))
(boost::make_shared<SymbolicFactor>(_E_, _B_))
(boost::make_shared<SymbolicFactor>(_E_, _X_));
const SymbolicFactorGraph asiaGraph = {
boost::make_shared<SymbolicFactor>(_T_),
boost::make_shared<SymbolicFactor>(_S_),
boost::make_shared<SymbolicFactor>(_T_, _E_, _L_),
boost::make_shared<SymbolicFactor>(_L_, _S_),
boost::make_shared<SymbolicFactor>(_S_, _B_),
boost::make_shared<SymbolicFactor>(_E_, _B_),
boost::make_shared<SymbolicFactor>(_E_, _X_)};

const SymbolicBayesNet asiaBayesNet = boost::assign::list_of
(boost::make_shared<SymbolicConditional>(_T_, _E_, _L_))
(boost::make_shared<SymbolicConditional>(_X_, _E_))
(boost::make_shared<SymbolicConditional>(_E_, _B_, _L_))
(boost::make_shared<SymbolicConditional>(_S_, _B_, _L_))
(boost::make_shared<SymbolicConditional>(_L_, _B_))
(boost::make_shared<SymbolicConditional>(_B_));
const SymbolicBayesNet asiaBayesNet = {
boost::make_shared<SymbolicConditional>(_T_, _E_, _L_),
boost::make_shared<SymbolicConditional>(_X_, _E_),
boost::make_shared<SymbolicConditional>(_E_, _B_, _L_),
boost::make_shared<SymbolicConditional>(_S_, _B_, _L_),
boost::make_shared<SymbolicConditional>(_L_, _B_),
boost::make_shared<SymbolicConditional>(_B_)};

/* ************************************************************************* */
// Allow creating Cliques and Keys in `list_of` chaining style:
using sharedClique = SymbolicBayesTreeClique::shared_ptr;
using Children = ChainedVector<sharedClique>;
using Keys = ChainedVector<Key>;

inline sharedClique LeafClique(const Keys::Result& keys,
DenseIndex nrFrontals) {
return boost::make_shared<SymbolicBayesTreeClique>(
boost::make_shared<SymbolicConditional>(
SymbolicConditional::FromKeys(keys, nrFrontals)));
}

inline sharedClique NodeClique(const Keys::Result& keys,
DenseIndex nrFrontals,
const Children::Result& children) {
sharedClique clique = LeafClique(keys, nrFrontals);
clique->children.assign(children.begin(), children.end());
for (auto&& child : children) child->parent_ = clique;
return clique;
}

/* ************************************************************************* */
// BayesTree for Asia example
SymbolicBayesTree __asiaBayesTree() {
SymbolicBayesTree result;
result.insertRoot(boost::make_shared<SymbolicBayesTreeClique>(
boost::make_shared<SymbolicConditional>(
SymbolicConditional::FromKeys(boost::assign::list_of(_E_)(_L_)(_B_), 3))));
result.addClique(boost::make_shared<SymbolicBayesTreeClique>(
boost::make_shared<SymbolicConditional>(
SymbolicConditional::FromKeys(boost::assign::list_of(_S_)(_B_) (_L_), 1))),
result.roots().front());
result.addClique(boost::make_shared<SymbolicBayesTreeClique>(
boost::make_shared<SymbolicConditional>(
SymbolicConditional::FromKeys(boost::assign::list_of(_T_)(_E_)(_L_), 1))),
result.roots().front());
result.addClique(boost::make_shared<SymbolicBayesTreeClique>(
boost::make_shared<SymbolicConditional>(
SymbolicConditional::FromKeys(boost::assign::list_of(_X_)(_E_), 1))),
result.roots().front());
result.insertRoot(LeafClique({_E_, _L_, _B_}, 3));
result.addClique(LeafClique({_S_, _B_, _L_}, 1), result.roots().front());
result.addClique(LeafClique({_T_, _E_, _L_}, 1), result.roots().front());
result.addClique(LeafClique({_X_, _E_}, 1), result.roots().front());
return result;
}

const SymbolicBayesTree asiaBayesTree = __asiaBayesTree();

/* ************************************************************************* */
const Ordering asiaOrdering = boost::assign::list_of(_X_)(_T_)(_S_)(_E_)(_L_)(_B_);

}
}
const Ordering asiaOrdering{_X_, _T_, _S_, _E_, _L_, _B_};
} // namespace
} // namespace gtsam
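
To make the new test helpers above concrete, a hedged sketch of how they compose (keys are placeholders):

  // Keys(...) chains like boost::assign::list_of did; LeafClique/NodeClique wrap
  // the SymbolicConditional::FromKeys + clique construction boilerplate.
  sharedClique leaf = LeafClique(Keys(0)(1), 1);          // conditional P(0 | 1)
  sharedClique root = NodeClique(Keys(1)(2), 2, {leaf});  // leaf becomes a child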

@ -17,16 +17,12 @@
* @author Viorela Ila
*/

#include <gtsam/symbolic/SymbolicBayesTree.h>
#include <gtsam/symbolic/SymbolicBayesNet.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/symbolic/SymbolicBayesNet.h>
#include <gtsam/symbolic/SymbolicBayesTree.h>
#include <gtsam/symbolic/tests/symbolicExampleGraphs.h>

#include <boost/assign/list_of.hpp>
#include <boost/assign/std/vector.hpp>
#include <boost/assign/std/list.hpp>
#include <boost/range/adaptor/indirected.hpp>
using namespace boost::assign;
using boost::adaptors::indirected;

#include <CppUnitLite/TestHarness.h>

@ -38,37 +34,8 @@ using namespace gtsam::symbol_shorthand;

static bool debug = false;

namespace {

/* ************************************************************************* */
// Helper functions for below
template<typename KEYS>
SymbolicBayesTreeClique::shared_ptr MakeClique(const KEYS& keys, DenseIndex nrFrontals)
{
return boost::make_shared<SymbolicBayesTreeClique>(
boost::make_shared<SymbolicConditional>(
SymbolicConditional::FromKeys(keys, nrFrontals)));
}

template<typename KEYS, typename CHILDREN>
SymbolicBayesTreeClique::shared_ptr MakeClique(
const KEYS& keys, DenseIndex nrFrontals, const CHILDREN& children)
{
SymbolicBayesTreeClique::shared_ptr clique =
boost::make_shared<SymbolicBayesTreeClique>(
boost::make_shared<SymbolicConditional>(
SymbolicConditional::FromKeys(keys, nrFrontals)));
clique->children.assign(children.begin(), children.end());
for(typename CHILDREN::const_iterator child = children.begin(); child != children.end(); ++child)
(*child)->parent_ = clique;
return clique;
}

}

/* ************************************************************************* */
TEST(SymbolicBayesTree, clear)
{
TEST(SymbolicBayesTree, clear) {
SymbolicBayesTree bayesTree = asiaBayesTree;
bayesTree.clear();

@ -79,34 +46,35 @@ TEST(SymbolicBayesTree, clear)
}

/* ************************************************************************* */
TEST(SymbolicBayesTree, clique_structure)
{
TEST(SymbolicBayesTree, clique_structure) {
// l1 l2
// / | / |
// x1 --- x2 --- x3 --- x4 --- x5
// \ |
// l3
SymbolicFactorGraph graph;
graph += SymbolicFactor(X(1), L(1));
graph += SymbolicFactor(X(1), X(2));
graph += SymbolicFactor(X(2), L(1));
graph += SymbolicFactor(X(2), X(3));
graph += SymbolicFactor(X(3), X(4));
graph += SymbolicFactor(X(4), L(2));
graph += SymbolicFactor(X(4), X(5));
graph += SymbolicFactor(L(2), X(5));
graph += SymbolicFactor(X(4), L(3));
graph += SymbolicFactor(X(5), L(3));
graph.emplace_shared<SymbolicFactor>(X(1), L(1));
graph.emplace_shared<SymbolicFactor>(X(1), X(2));
graph.emplace_shared<SymbolicFactor>(X(2), L(1));
graph.emplace_shared<SymbolicFactor>(X(2), X(3));
graph.emplace_shared<SymbolicFactor>(X(3), X(4));
graph.emplace_shared<SymbolicFactor>(X(4), L(2));
graph.emplace_shared<SymbolicFactor>(X(4), X(5));
graph.emplace_shared<SymbolicFactor>(L(2), X(5));
graph.emplace_shared<SymbolicFactor>(X(4), L(3));
graph.emplace_shared<SymbolicFactor>(X(5), L(3));

SymbolicBayesTree expected;
expected.insertRoot(
MakeClique(list_of(X(2)) (X(3)), 2, list_of
(MakeClique(list_of(X(4)) (X(3)), 1, list_of
(MakeClique(list_of(X(5)) (L(2)) (X(4)), 2, list_of
(MakeClique(list_of(L(3)) (X(4)) (X(5)), 1))))))
(MakeClique(list_of(X(1)) (L(1)) (X(2)), 2))));
NodeClique(Keys(X(2))(X(3)), 2,
Children(NodeClique(
Keys(X(4))(X(3)), 1,
Children(NodeClique(
Keys(X(5))(L(2))(X(4)), 2,
Children(LeafClique(Keys(L(3))(X(4))(X(5)), 1))))))(
LeafClique(Keys(X(1))(L(1))(X(2)), 2))));

Ordering order = list_of(X(1)) (L(3)) (L(1)) (X(5)) (X(2)) (L(2)) (X(4)) (X(3));
Ordering order{X(1), L(3), L(1), X(5), X(2), L(2), X(4), X(3)};

SymbolicBayesTree actual = *graph.eliminateMultifrontal(order);
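
A compact sketch of the two idioms this test now relies on (symbol shorthands as in the test, keys illustrative):

  SymbolicFactorGraph g;
  g.emplace_shared<SymbolicFactor>(X(1), X(2));  // replaces graph += SymbolicFactor(...)
  g.emplace_shared<SymbolicFactor>(X(2), X(3));
  const Ordering order{X(1), X(2), X(3)};         // replaces list_of chaining
  SymbolicBayesNet bn = *g.eliminateSequential(order);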

@ -120,56 +88,51 @@ Bayes Tree for testing conversion to a forest of orphans needed for incremental.
D|C F|E
*/
/* ************************************************************************* */
TEST( BayesTree, removePath )
{
const Key _A_=A(0), _B_=B(0), _C_=C(0), _D_=D(0), _E_=E(0), _F_=F(0);
TEST(BayesTree, removePath) {
const Key _A_ = A(0), _B_ = B(0), _C_ = C(0), _D_ = D(0), _E_ = E(0),
_F_ = F(0);

SymbolicBayesTree bayesTreeOrig;
bayesTreeOrig.insertRoot(
MakeClique(list_of(_A_)(_B_), 2, list_of
(MakeClique(list_of(_C_)(_A_), 1, list_of
(MakeClique(list_of(_D_)(_C_), 1))))
(MakeClique(list_of(_E_)(_B_), 1, list_of
(MakeClique(list_of(_F_)(_E_), 1))))));
auto left = NodeClique(Keys(_C_)(_A_), 1, {LeafClique(Keys(_D_)(_C_), 1)});
auto right = NodeClique(Keys(_E_)(_B_), 1, {LeafClique(Keys(_F_)(_E_), 1)});
bayesTreeOrig.insertRoot(NodeClique(Keys(_A_)(_B_), 2, {left, right}));

SymbolicBayesTree bayesTree = bayesTreeOrig;

// remove C, expected outcome: factor graph with ABC,
// Bayes Tree now contains two orphan trees: D|C and E|B,F|E
SymbolicFactorGraph expected;
expected += SymbolicFactor(_A_,_B_);
expected += SymbolicFactor(_C_,_A_);
SymbolicBayesTree::Cliques expectedOrphans;
expectedOrphans += bayesTree[_D_], bayesTree[_E_];
expected.emplace_shared<SymbolicFactor>(_A_, _B_);
expected.emplace_shared<SymbolicFactor>(_C_, _A_);
SymbolicBayesTree::Cliques expectedOrphans{bayesTree[_D_], bayesTree[_E_]};

SymbolicBayesNet bn;
SymbolicBayesTree::Cliques orphans;
bayesTree.removePath(bayesTree[_C_], &bn, &orphans);
SymbolicFactorGraph factors(bn);
CHECK(assert_equal(expected, factors));
CHECK(assert_container_equal(expectedOrphans|indirected, orphans|indirected));
CHECK(assert_container_equal(expectedOrphans | indirected,
orphans | indirected));

bayesTree = bayesTreeOrig;

// remove E: factor graph with EB; E|B removed from second orphan tree
SymbolicFactorGraph expected2;
expected2 += SymbolicFactor(_A_,_B_);
expected2 += SymbolicFactor(_E_,_B_);
SymbolicBayesTree::Cliques expectedOrphans2;
expectedOrphans2 += bayesTree[_F_];
expectedOrphans2 += bayesTree[_C_];
expected2.emplace_shared<SymbolicFactor>(_A_, _B_);
expected2.emplace_shared<SymbolicFactor>(_E_, _B_);
SymbolicBayesTree::Cliques expectedOrphans2{bayesTree[_F_], bayesTree[_C_]};

SymbolicBayesNet bn2;
SymbolicBayesTree::Cliques orphans2;
bayesTree.removePath(bayesTree[_E_], &bn2, &orphans2);
SymbolicFactorGraph factors2(bn2);
CHECK(assert_equal(expected2, factors2));
CHECK(assert_container_equal(expectedOrphans2|indirected, orphans2|indirected));
CHECK(assert_container_equal(expectedOrphans2 | indirected,
orphans2 | indirected));
}

/* ************************************************************************* */
TEST( BayesTree, removePath2 )
{
TEST(BayesTree, removePath2) {
SymbolicBayesTree bayesTree = asiaBayesTree;

// Call remove-path with clique B
|
|||
|
||||
// Check expected outcome
|
||||
SymbolicFactorGraph expected;
|
||||
expected += SymbolicFactor(_E_,_L_,_B_);
|
||||
expected.emplace_shared<SymbolicFactor>(_E_, _L_, _B_);
|
||||
CHECK(assert_equal(expected, factors));
|
||||
SymbolicBayesTree::Cliques expectedOrphans;
|
||||
expectedOrphans += bayesTree[_S_], bayesTree[_T_], bayesTree[_X_];
|
||||
CHECK(assert_container_equal(expectedOrphans|indirected, orphans|indirected));
|
||||
SymbolicBayesTree::Cliques expectedOrphans{bayesTree[_S_], bayesTree[_T_],
|
||||
bayesTree[_X_]};
|
||||
CHECK(assert_container_equal(expectedOrphans | indirected,
|
||||
orphans | indirected));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(BayesTree, removePath3)
|
||||
{
|
||||
TEST(BayesTree, removePath3) {
|
||||
SymbolicBayesTree bayesTree = asiaBayesTree;
|
||||
|
||||
// Call remove-path with clique T
|
||||
|
@ -200,199 +163,183 @@ TEST(BayesTree, removePath3)
|
|||
|
||||
// Check expected outcome
|
||||
SymbolicFactorGraph expected;
|
||||
expected += SymbolicFactor(_E_, _L_, _B_);
|
||||
expected += SymbolicFactor(_T_, _E_, _L_);
|
||||
expected.emplace_shared<SymbolicFactor>(_E_, _L_, _B_);
|
||||
expected.emplace_shared<SymbolicFactor>(_T_, _E_, _L_);
|
||||
CHECK(assert_equal(expected, factors));
|
||||
SymbolicBayesTree::Cliques expectedOrphans;
|
||||
expectedOrphans += bayesTree[_S_], bayesTree[_X_];
|
||||
CHECK(assert_container_equal(expectedOrphans|indirected, orphans|indirected));
|
||||
SymbolicBayesTree::Cliques expectedOrphans{bayesTree[_S_], bayesTree[_X_]};
|
||||
CHECK(assert_container_equal(expectedOrphans | indirected,
|
||||
orphans | indirected));
|
||||
}
|
||||
|
||||
void getAllCliques(const SymbolicBayesTree::sharedClique& subtree, SymbolicBayesTree::Cliques& cliques) {
|
||||
void getAllCliques(const SymbolicBayesTree::sharedClique& subtree,
|
||||
SymbolicBayesTree::Cliques& cliques) {
|
||||
// Check if subtree exists
|
||||
if (subtree) {
|
||||
cliques.push_back(subtree);
|
||||
// Recursive call over all child cliques
|
||||
for(SymbolicBayesTree::sharedClique& childClique: subtree->children) {
|
||||
getAllCliques(childClique,cliques);
|
||||
for (SymbolicBayesTree::sharedClique& childClique : subtree->children) {
|
||||
getAllCliques(childClique, cliques);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST( BayesTree, shortcutCheck )
|
||||
{
|
||||
const Key _A_=6, _B_=5, _C_=4, _D_=3, _E_=2, _F_=1, _G_=0;
|
||||
SymbolicFactorGraph chain = list_of
|
||||
(SymbolicFactor(_A_))
|
||||
(SymbolicFactor(_B_, _A_))
|
||||
(SymbolicFactor(_C_, _A_))
|
||||
(SymbolicFactor(_D_, _C_))
|
||||
(SymbolicFactor(_E_, _B_))
|
||||
(SymbolicFactor(_F_, _E_))
|
||||
(SymbolicFactor(_G_, _F_));
|
||||
Ordering ordering(list_of(_G_)(_F_)(_E_)(_D_)(_C_)(_B_)(_A_));
|
||||
TEST(BayesTree, shortcutCheck) {
|
||||
const Key _A_ = 6, _B_ = 5, _C_ = 4, _D_ = 3, _E_ = 2, _F_ = 1, _G_ = 0;
|
||||
auto chain = SymbolicFactorGraph(SymbolicFactor(_A_)) //
|
||||
(SymbolicFactor(_B_, _A_)) //
|
||||
(SymbolicFactor(_C_, _A_)) //
|
||||
(SymbolicFactor(_D_, _C_)) //
|
||||
(SymbolicFactor(_E_, _B_)) //
|
||||
(SymbolicFactor(_F_, _E_)) //
|
||||
(SymbolicFactor(_G_, _F_));
|
||||
Ordering ordering{_G_, _F_, _E_, _D_, _C_, _B_, _A_};
|
||||
SymbolicBayesTree bayesTree = *chain.eliminateMultifrontal(ordering);
|
||||
|
||||
//bayesTree.saveGraph("BT1.dot");
|
||||
// bayesTree.saveGraph("BT1.dot");
|
||||
|
||||
SymbolicBayesTree::sharedClique rootClique = bayesTree.roots().front();
|
||||
//rootClique->printTree();
|
||||
// rootClique->printTree();
|
||||
SymbolicBayesTree::Cliques allCliques;
|
||||
getAllCliques(rootClique,allCliques);
|
||||
getAllCliques(rootClique, allCliques);
|
||||
|
||||
for(SymbolicBayesTree::sharedClique& clique: allCliques) {
|
||||
//clique->print("Clique#");
|
||||
for (SymbolicBayesTree::sharedClique& clique : allCliques) {
|
||||
// clique->print("Clique#");
|
||||
SymbolicBayesNet bn = clique->shortcut(rootClique);
|
||||
//bn.print("Shortcut:\n");
|
||||
//cout << endl;
|
||||
// bn.print("Shortcut:\n");
|
||||
// cout << endl;
|
||||
}
|
||||
|
||||
// Check if all the cached shortcuts are cleared
|
||||
rootClique->deleteCachedShortcuts();
|
||||
for(SymbolicBayesTree::sharedClique& clique: allCliques) {
|
||||
for (SymbolicBayesTree::sharedClique& clique : allCliques) {
|
||||
bool notCleared = clique->cachedSeparatorMarginal().is_initialized();
|
||||
CHECK( notCleared == false);
|
||||
CHECK(notCleared == false);
|
||||
}
|
||||
EXPECT_LONGS_EQUAL(0, (long)rootClique->numCachedSeparatorMarginals());
|
||||
|
||||
// for(SymbolicBayesTree::sharedClique& clique: allCliques) {
|
||||
// clique->print("Clique#");
|
||||
// if(clique->cachedShortcut()){
|
||||
// bn = clique->cachedShortcut().get();
|
||||
// bn.print("Shortcut:\n");
|
||||
// }
|
||||
// else
|
||||
// cout << "Not Initialized" << endl;
|
||||
// cout << endl;
|
||||
// }
|
||||
// for(SymbolicBayesTree::sharedClique& clique: allCliques) {
|
||||
// clique->print("Clique#");
|
||||
// if(clique->cachedShortcut()){
|
||||
// bn = clique->cachedShortcut().get();
|
||||
// bn.print("Shortcut:\n");
|
||||
// }
|
||||
// else
|
||||
// cout << "Not Initialized" << endl;
|
||||
// cout << endl;
|
||||
// }
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST( BayesTree, removeTop )
|
||||
{
|
||||
TEST(BayesTree, removeTop) {
|
||||
SymbolicBayesTree bayesTree = asiaBayesTree;
|
||||
|
||||
// create a new factor to be inserted
|
||||
//boost::shared_ptr<IndexFactor> newFactor(new IndexFactor(_S_,_B_));
|
||||
// boost::shared_ptr<IndexFactor> newFactor(new IndexFactor(_S_,_B_));
|
||||
|
||||
// Remove the contaminated part of the Bayes tree
|
||||
SymbolicBayesNet bn;
|
||||
SymbolicBayesTree::Cliques orphans;
|
||||
bayesTree.removeTop(list_of(_B_)(_S_), &bn, &orphans);
|
||||
bayesTree.removeTop(Keys(_B_)(_S_), &bn, &orphans);
|
||||
|
||||
// Check expected outcome
|
||||
SymbolicBayesNet expected;
|
||||
expected += SymbolicConditional::FromKeys(list_of(_E_)(_L_)(_B_), 3);
|
||||
expected += SymbolicConditional::FromKeys(list_of(_S_)(_B_)(_L_), 1);
|
||||
expected += SymbolicConditional::FromKeys<KeyVector>(Keys(_E_)(_L_)(_B_), 3);
|
||||
expected += SymbolicConditional::FromKeys<KeyVector>(Keys(_S_)(_B_)(_L_), 1);
|
||||
CHECK(assert_equal(expected, bn));
|
||||
|
||||
SymbolicBayesTree::Cliques expectedOrphans;
|
||||
expectedOrphans += bayesTree[_T_], bayesTree[_X_];
|
||||
CHECK(assert_container_equal(expectedOrphans|indirected, orphans|indirected));
|
||||
SymbolicBayesTree::Cliques expectedOrphans{bayesTree[_T_], bayesTree[_X_]};
|
||||
CHECK(assert_container_equal(expectedOrphans | indirected,
|
||||
orphans | indirected));
|
||||
|
||||
// Try removeTop again with a factor that should not change a thing
|
||||
//boost::shared_ptr<IndexFactor> newFactor2(new IndexFactor(_B_));
|
||||
// boost::shared_ptr<IndexFactor> newFactor2(new IndexFactor(_B_));
|
||||
SymbolicBayesNet bn2;
|
||||
SymbolicBayesTree::Cliques orphans2;
|
||||
bayesTree.removeTop(list_of(_B_), &bn2, &orphans2);
|
||||
bayesTree.removeTop(Keys(_B_), &bn2, &orphans2);
|
||||
SymbolicFactorGraph factors2(bn2);
|
||||
SymbolicFactorGraph expected2;
|
||||
CHECK(assert_equal(expected2, factors2));
|
||||
SymbolicBayesTree::Cliques expectedOrphans2;
|
||||
CHECK(assert_container_equal(expectedOrphans2|indirected, orphans2|indirected));
|
||||
CHECK(assert_container_equal(expectedOrphans2 | indirected,
|
||||
orphans2 | indirected));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST( BayesTree, removeTop2 )
|
||||
{
|
||||
TEST(BayesTree, removeTop2) {
|
||||
SymbolicBayesTree bayesTree = asiaBayesTree;
|
||||
|
||||
// create two factors to be inserted
|
||||
//SymbolicFactorGraph newFactors;
|
||||
//newFactors.push_factor(_B_);
|
||||
//newFactors.push_factor(_S_);
|
||||
// SymbolicFactorGraph newFactors;
|
||||
// newFactors.push_factor(_B_);
|
||||
// newFactors.push_factor(_S_);
|
||||
|
||||
// Remove the contaminated part of the Bayes tree
|
||||
SymbolicBayesNet bn;
|
||||
SymbolicBayesTree::Cliques orphans;
|
||||
bayesTree.removeTop(list_of(_T_), &bn, &orphans);
|
||||
bayesTree.removeTop(Keys(_T_), &bn, &orphans);
|
||||
|
||||
// Check expected outcome
|
||||
SymbolicBayesNet expected = list_of
|
||||
(SymbolicConditional::FromKeys(list_of(_E_)(_L_)(_B_), 3))
|
||||
(SymbolicConditional::FromKeys(list_of(_T_)(_E_)(_L_), 1));
|
||||
auto expected = SymbolicBayesNet(
|
||||
SymbolicConditional::FromKeys<KeyVector>(Keys(_E_)(_L_)(_B_), 3))(
|
||||
SymbolicConditional::FromKeys<KeyVector>(Keys(_T_)(_E_)(_L_), 1));
|
||||
CHECK(assert_equal(expected, bn));
|
||||
|
||||
SymbolicBayesTree::Cliques expectedOrphans;
|
||||
expectedOrphans += bayesTree[_S_], bayesTree[_X_];
|
||||
CHECK(assert_container_equal(expectedOrphans|indirected, orphans|indirected));
|
||||
SymbolicBayesTree::Cliques expectedOrphans{bayesTree[_S_], bayesTree[_X_]};
|
||||
CHECK(assert_container_equal(expectedOrphans | indirected,
|
||||
orphans | indirected));
|
||||
}
|
||||
|
||||
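
Note the explicit <KeyVector> template argument on the FromKeys calls above when passing the Keys(...) helper: template deduction would otherwise pick the helper type itself, which has no iterators, while naming KeyVector triggers the helper's conversion operator. A minimal sketch (keys illustrative):

  // Keys(...) converts to KeyVector only once the target type is named.
  auto conditional = SymbolicConditional::FromKeys<KeyVector>(Keys(0)(1)(2), 1);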

/* ************************************************************************* */
TEST( BayesTree, removeTop3 )
{
SymbolicFactorGraph graph = list_of
(SymbolicFactor(L(5)))
(SymbolicFactor(X(4), L(5)))
(SymbolicFactor(X(2), X(4)))
(SymbolicFactor(X(3), X(2)));
Ordering ordering(list_of (X(3)) (X(2)) (X(4)) (L(5)) );
TEST(BayesTree, removeTop3) {
auto graph = SymbolicFactorGraph(SymbolicFactor(L(5)))(SymbolicFactor(
X(4), L(5)))(SymbolicFactor(X(2), X(4)))(SymbolicFactor(X(3), X(2)));
Ordering ordering{X(3), X(2), X(4), L(5)};
SymbolicBayesTree bayesTree = *graph.eliminateMultifrontal(ordering);

// remove all
SymbolicBayesNet bn;
SymbolicBayesTree::Cliques orphans;
bayesTree.removeTop(list_of(L(5))(X(4))(X(2))(X(3)), &bn, &orphans);
bayesTree.removeTop(Keys(L(5))(X(4))(X(2))(X(3)), &bn, &orphans);

SymbolicBayesNet expectedBn = list_of
(SymbolicConditional::FromKeys(list_of(X(4))(L(5)), 2))
(SymbolicConditional(X(2), X(4)))
(SymbolicConditional(X(3), X(2)));
auto expectedBn = SymbolicBayesNet(
SymbolicConditional::FromKeys<KeyVector>(Keys(X(4))(L(5)), 2))(
SymbolicConditional(X(2), X(4)))(SymbolicConditional(X(3), X(2)));
EXPECT(assert_equal(expectedBn, bn));
EXPECT(orphans.empty());
}

/* ************************************************************************* */
TEST( BayesTree, removeTop4 )
{
SymbolicFactorGraph graph = list_of
(SymbolicFactor(L(5)))
(SymbolicFactor(X(4), L(5)))
(SymbolicFactor(X(2), X(4)))
(SymbolicFactor(X(3), X(2)));
Ordering ordering(list_of (X(3)) (X(2)) (X(4)) (L(5)) );
TEST(BayesTree, removeTop4) {
auto graph = SymbolicFactorGraph(SymbolicFactor(L(5)))(SymbolicFactor(
X(4), L(5)))(SymbolicFactor(X(2), X(4)))(SymbolicFactor(X(3), X(2)));
Ordering ordering{X(3), X(2), X(4), L(5)};
SymbolicBayesTree bayesTree = *graph.eliminateMultifrontal(ordering);

// remove all
SymbolicBayesNet bn;
SymbolicBayesTree::Cliques orphans;
bayesTree.removeTop(list_of(X(2))(L(5))(X(4))(X(3)), &bn, &orphans);
bayesTree.removeTop(Keys(X(2))(L(5))(X(4))(X(3)), &bn, &orphans);

SymbolicBayesNet expectedBn = list_of
(SymbolicConditional::FromKeys(list_of(X(4))(L(5)), 2))
(SymbolicConditional(X(2), X(4)))
(SymbolicConditional(X(3), X(2)));
auto expectedBn = SymbolicBayesNet(
SymbolicConditional::FromKeys<KeyVector>(Keys(X(4))(L(5)), 2))(
SymbolicConditional(X(2), X(4)))(SymbolicConditional(X(3), X(2)));
EXPECT(assert_equal(expectedBn, bn));
EXPECT(orphans.empty());
}

/* ************************************************************************* */
TEST( BayesTree, removeTop5 )
{
TEST(BayesTree, removeTop5) {
// Remove top called with variables that are not in the Bayes tree
SymbolicFactorGraph graph = list_of
(SymbolicFactor(L(5)))
(SymbolicFactor(X(4), L(5)))
(SymbolicFactor(X(2), X(4)))
(SymbolicFactor(X(3), X(2)));
Ordering ordering(list_of (X(3)) (X(2)) (X(4)) (L(5)) );
auto graph = SymbolicFactorGraph(SymbolicFactor(L(5)))(SymbolicFactor(
X(4), L(5)))(SymbolicFactor(X(2), X(4)))(SymbolicFactor(X(3), X(2)));
Ordering ordering{X(3), X(2), X(4), L(5)};
SymbolicBayesTree bayesTree = *graph.eliminateMultifrontal(ordering);

// Remove nonexistant
SymbolicBayesNet bn;
SymbolicBayesTree::Cliques orphans;
bayesTree.removeTop(list_of(X(10)), &bn, &orphans);
bayesTree.removeTop(Keys(X(10)), &bn, &orphans);

SymbolicBayesNet expectedBn;
EXPECT(assert_equal(expectedBn, bn));

@ -400,29 +347,28 @@ TEST( BayesTree, removeTop5 )
}

/* ************************************************************************* */
TEST( SymbolicBayesTree, thinTree ) {

// create a thin-tree Bayesnet, a la Jean-Guillaume
TEST(SymbolicBayesTree, thinTree) {
// create a thin-tree Bayes net, a la Jean-Guillaume
SymbolicBayesNet bayesNet;
bayesNet.push_back(boost::make_shared<SymbolicConditional>(14));
bayesNet.emplace_shared<SymbolicConditional>(14);

bayesNet.push_back(boost::make_shared<SymbolicConditional>(13, 14));
bayesNet.push_back(boost::make_shared<SymbolicConditional>(12, 14));
bayesNet.emplace_shared<SymbolicConditional>(13, 14);
bayesNet.emplace_shared<SymbolicConditional>(12, 14);

bayesNet.push_back(boost::make_shared<SymbolicConditional>(11, 13, 14));
bayesNet.push_back(boost::make_shared<SymbolicConditional>(10, 13, 14));
bayesNet.push_back(boost::make_shared<SymbolicConditional>(9, 12, 14));
bayesNet.push_back(boost::make_shared<SymbolicConditional>(8, 12, 14));
bayesNet.emplace_shared<SymbolicConditional>(11, 13, 14);
bayesNet.emplace_shared<SymbolicConditional>(10, 13, 14);
bayesNet.emplace_shared<SymbolicConditional>(9, 12, 14);
bayesNet.emplace_shared<SymbolicConditional>(8, 12, 14);

bayesNet.push_back(boost::make_shared<SymbolicConditional>(7, 11, 13));
bayesNet.push_back(boost::make_shared<SymbolicConditional>(6, 11, 13));
bayesNet.push_back(boost::make_shared<SymbolicConditional>(5, 10, 13));
bayesNet.push_back(boost::make_shared<SymbolicConditional>(4, 10, 13));
bayesNet.emplace_shared<SymbolicConditional>(7, 11, 13);
bayesNet.emplace_shared<SymbolicConditional>(6, 11, 13);
bayesNet.emplace_shared<SymbolicConditional>(5, 10, 13);
bayesNet.emplace_shared<SymbolicConditional>(4, 10, 13);

bayesNet.push_back(boost::make_shared<SymbolicConditional>(3, 9, 12));
bayesNet.push_back(boost::make_shared<SymbolicConditional>(2, 9, 12));
bayesNet.push_back(boost::make_shared<SymbolicConditional>(1, 8, 12));
bayesNet.push_back(boost::make_shared<SymbolicConditional>(0, 8, 12));
bayesNet.emplace_shared<SymbolicConditional>(3, 9, 12);
bayesNet.emplace_shared<SymbolicConditional>(2, 9, 12);
bayesNet.emplace_shared<SymbolicConditional>(1, 8, 12);
bayesNet.emplace_shared<SymbolicConditional>(0, 8, 12);

if (debug) {
GTSAM_PRINT(bayesNet);

@ -430,7 +376,8 @@ TEST( SymbolicBayesTree, thinTree ) {
}

// create a BayesTree out of a Bayes net
SymbolicBayesTree bayesTree = *SymbolicFactorGraph(bayesNet).eliminateMultifrontal();
SymbolicBayesTree bayesTree =
*SymbolicFactorGraph(bayesNet).eliminateMultifrontal();
if (debug) {
GTSAM_PRINT(bayesTree);
bayesTree.saveGraph("/tmp/SymbolicBayesTree.dot");

@ -442,7 +389,7 @@ TEST( SymbolicBayesTree, thinTree ) {
// check shortcut P(S9||R) to root
SymbolicBayesTree::Clique::shared_ptr c = bayesTree[9];
SymbolicBayesNet shortcut = c->shortcut(R);
SymbolicBayesNet expected = list_of(SymbolicConditional(14, 11, 13));
auto expected = SymbolicBayesNet(SymbolicConditional(14, 11, 13));
EXPECT(assert_equal(expected, shortcut));
}

@ -450,9 +397,8 @@ TEST( SymbolicBayesTree, thinTree ) {
// check shortcut P(S8||R) to root
SymbolicBayesTree::Clique::shared_ptr c = bayesTree[8];
SymbolicBayesNet shortcut = c->shortcut(R);
SymbolicBayesNet expected = list_of
(SymbolicConditional(12, 14))
(SymbolicConditional(14, 11, 13));
auto expected = SymbolicBayesNet(SymbolicConditional(12, 14))(
SymbolicConditional(14, 11, 13));
EXPECT(assert_equal(expected, shortcut));
}

@ -460,7 +406,7 @@ TEST( SymbolicBayesTree, thinTree ) {
// check shortcut P(S4||R) to root
SymbolicBayesTree::Clique::shared_ptr c = bayesTree[4];
SymbolicBayesNet shortcut = c->shortcut(R);
SymbolicBayesNet expected = list_of(SymbolicConditional(10, 11, 13));
auto expected = SymbolicBayesNet(SymbolicConditional(10, 11, 13));
EXPECT(assert_equal(expected, shortcut));
}

@ -468,8 +414,8 @@ TEST( SymbolicBayesTree, thinTree ) {
// check shortcut P(S2||R) to root
SymbolicBayesTree::Clique::shared_ptr c = bayesTree[2];
SymbolicBayesNet shortcut = c->shortcut(R);
SymbolicBayesNet expected = list_of(SymbolicConditional(9, 11, 12, 13))
(SymbolicConditional(12, 11, 13));
auto expected = SymbolicBayesNet(SymbolicConditional(9, 11, 12, 13))(
SymbolicConditional(12, 11, 13));
EXPECT(assert_equal(expected, shortcut));
}

@ -477,28 +423,28 @@ TEST( SymbolicBayesTree, thinTree ) {
// check shortcut P(S0||R) to root
SymbolicBayesTree::Clique::shared_ptr c = bayesTree[0];
SymbolicBayesNet shortcut = c->shortcut(R);
SymbolicBayesNet expected = list_of(SymbolicConditional(8, 11, 12, 13))
(SymbolicConditional(12, 11, 13));
auto expected = SymbolicBayesNet(SymbolicConditional(8, 11, 12, 13))(
SymbolicConditional(12, 11, 13));
EXPECT(assert_equal(expected, shortcut));
}

SymbolicBayesNet::shared_ptr actualJoint;

// Check joint P(8,2)
if (false) { // TODO, not disjoint
if (false) { // TODO, not disjoint
actualJoint = bayesTree.jointBayesNet(8, 2);
SymbolicBayesNet expected;
expected.push_back(boost::make_shared<SymbolicConditional>(8));
expected.push_back(boost::make_shared<SymbolicConditional>(2, 8));
expected.emplace_shared<SymbolicConditional>(8);
expected.emplace_shared<SymbolicConditional>(2, 8);
EXPECT(assert_equal(expected, *actualJoint));
}

// Check joint P(1,2)
if (false) { // TODO, not disjoint
if (false) { // TODO, not disjoint
actualJoint = bayesTree.jointBayesNet(1, 2);
SymbolicBayesNet expected;
expected.push_back(boost::make_shared<SymbolicConditional>(2));
expected.push_back(boost::make_shared<SymbolicConditional>(1, 2));
expected.emplace_shared<SymbolicConditional>(2);
expected.emplace_shared<SymbolicConditional>(1, 2);
EXPECT(assert_equal(expected, *actualJoint));
}

@ -506,35 +452,33 @@ TEST( SymbolicBayesTree, thinTree ) {
if (true) {
actualJoint = bayesTree.jointBayesNet(2, 6);
SymbolicBayesNet expected;
expected.push_back(boost::make_shared<SymbolicConditional>(2, 6));
expected.push_back(boost::make_shared<SymbolicConditional>(6));
expected.emplace_shared<SymbolicConditional>(2, 6);
expected.emplace_shared<SymbolicConditional>(6);
EXPECT(assert_equal(expected, *actualJoint));
}

// Check joint P(4,6)
if (false) { // TODO, not disjoint
if (false) { // TODO, not disjoint
actualJoint = bayesTree.jointBayesNet(4, 6);
SymbolicBayesNet expected;
expected.push_back(boost::make_shared<SymbolicConditional>(6));
expected.push_back(boost::make_shared<SymbolicConditional>(4, 6));
expected.emplace_shared<SymbolicConditional>(6);
expected.emplace_shared<SymbolicConditional>(4, 6);
EXPECT(assert_equal(expected, *actualJoint));
}
}

/* ************************************************************************* */
TEST(SymbolicBayesTree, forest_joint)
{
TEST(SymbolicBayesTree, forest_joint) {
// Create forest
SymbolicBayesTreeClique::shared_ptr root1 = MakeClique(list_of(1), 1);
SymbolicBayesTreeClique::shared_ptr root2 = MakeClique(list_of(2), 1);
sharedClique root1 = LeafClique(Keys(1), 1);
sharedClique root2 = LeafClique(Keys(2), 1);
SymbolicBayesTree bayesTree;
bayesTree.insertRoot(root1);
bayesTree.insertRoot(root2);

// Check joint
SymbolicBayesNet expected = list_of
(SymbolicConditional(1))
(SymbolicConditional(2));
auto expected =
SymbolicBayesNet(SymbolicConditional(1))(SymbolicConditional(2));
SymbolicBayesNet actual = *bayesTree.jointBayesNet(1, 2);

EXPECT(assert_equal(expected, actual));

@ -550,7 +494,7 @@ TEST(SymbolicBayesTree, forest_joint)
C6 0 : 1
**************************************************************************** */

TEST( SymbolicBayesTree, linear_smoother_shortcuts ) {
TEST(SymbolicBayesTree, linear_smoother_shortcuts) {
// Create smoother with 7 nodes
SymbolicFactorGraph smoother;
smoother.push_factor(0);

@ -581,7 +525,8 @@ TEST( SymbolicBayesTree, linear_smoother_shortcuts ) {

{
// check shortcut P(S2||R) to root
SymbolicBayesTree::Clique::shared_ptr c = bayesTree[4]; // 4 is frontal in C2
SymbolicBayesTree::Clique::shared_ptr c =
bayesTree[4]; // 4 is frontal in C2
SymbolicBayesNet shortcut = c->shortcut(R);
SymbolicBayesNet expected;
EXPECT(assert_equal(expected, shortcut));

@ -589,45 +534,46 @@ TEST( SymbolicBayesTree, linear_smoother_shortcuts ) {

{
// check shortcut P(S3||R) to root
SymbolicBayesTree::Clique::shared_ptr c = bayesTree[3]; // 3 is frontal in C3
SymbolicBayesTree::Clique::shared_ptr c =
bayesTree[3]; // 3 is frontal in C3
SymbolicBayesNet shortcut = c->shortcut(R);
SymbolicBayesNet expected = list_of(SymbolicConditional(4, 5));
auto expected = SymbolicBayesNet(SymbolicConditional(4, 5));
EXPECT(assert_equal(expected, shortcut));
}

{
// check shortcut P(S4||R) to root
SymbolicBayesTree::Clique::shared_ptr c = bayesTree[2]; // 2 is frontal in C4
SymbolicBayesTree::Clique::shared_ptr c =
bayesTree[2]; // 2 is frontal in C4
SymbolicBayesNet shortcut = c->shortcut(R);
SymbolicBayesNet expected = list_of(SymbolicConditional(3, 5));
auto expected = SymbolicBayesNet(SymbolicConditional(3, 5));
EXPECT(assert_equal(expected, shortcut));
}
}

/* ************************************************************************* */
// from testSymbolicJunctionTree, which failed at one point
TEST(SymbolicBayesTree, complicatedMarginal)
{
TEST(SymbolicBayesTree, complicatedMarginal) {
// Create the conditionals to go in the BayesTree
SymbolicBayesTreeClique::shared_ptr cur;
SymbolicBayesTreeClique::shared_ptr root = MakeClique(list_of(11)(12), 2);
sharedClique cur;
sharedClique root = LeafClique(Keys(11)(12), 2);
cur = root;

root->children += MakeClique(list_of(9)(10)(11)(12), 2);
root->children.push_back(LeafClique(Keys(9)(10)(11)(12), 2));
root->children.back()->parent_ = root;

root->children += MakeClique(list_of(7)(8)(11), 2);
root->children.push_back(LeafClique(Keys(7)(8)(11), 2));
root->children.back()->parent_ = root;
cur = root->children.back();

cur->children += MakeClique(list_of(5)(6)(7)(8), 2);
cur->children.push_back(LeafClique(Keys(5)(6)(7)(8), 2));
cur->children.back()->parent_ = cur;
cur = cur->children.back();

cur->children += MakeClique(list_of(3)(4)(6), 2);
cur->children.push_back(LeafClique(Keys(3)(4)(6), 2));
cur->children.back()->parent_ = cur;

cur->children += MakeClique(list_of(1)(2)(5), 2);
cur->children.push_back(LeafClique(Keys(1)(2)(5), 2));
cur->children.back()->parent_ = cur;

// Create Bayes Tree

@ -656,9 +602,8 @@ TEST(SymbolicBayesTree, complicatedMarginal)
{
SymbolicBayesTree::Clique::shared_ptr c = bt[5];
SymbolicBayesNet shortcut = c->shortcut(root);
SymbolicBayesNet expected = list_of
(SymbolicConditional(7, 8, 11))
(SymbolicConditional(8, 11));
auto expected = SymbolicBayesNet(SymbolicConditional(7, 8, 11))(
SymbolicConditional(8, 11));
EXPECT(assert_equal(expected, shortcut));
}

@ -666,7 +611,7 @@ TEST(SymbolicBayesTree, complicatedMarginal)
{
SymbolicBayesTree::Clique::shared_ptr c = bt[3];
SymbolicBayesNet shortcut = c->shortcut(root);
SymbolicBayesNet expected = list_of(SymbolicConditional(6, 11));
auto expected = SymbolicBayesNet(SymbolicConditional(6, 11));
EXPECT(assert_equal(expected, shortcut));
}

@ -674,7 +619,7 @@ TEST(SymbolicBayesTree, complicatedMarginal)
{
SymbolicBayesTree::Clique::shared_ptr c = bt[1];
SymbolicBayesNet shortcut = c->shortcut(root);
SymbolicBayesNet expected = list_of(SymbolicConditional(5, 11));
auto expected = SymbolicBayesNet(SymbolicConditional(5, 11));
EXPECT(assert_equal(expected, shortcut));
}

@ -689,12 +634,10 @@ TEST(SymbolicBayesTree, complicatedMarginal)
SymbolicFactor::shared_ptr actual = bt.marginalFactor(6);
EXPECT(assert_equal(SymbolicFactor(6), *actual, 1e-1));
}

}

/* ************************************************************************* */
TEST(SymbolicBayesTree, COLAMDvsMETIS) {

// create circular graph
SymbolicFactorGraph sfg;
sfg.push_factor(0, 1);

@ -707,20 +650,23 @@ TEST(SymbolicBayesTree, COLAMDvsMETIS) {
// COLAMD
{
Ordering ordering = Ordering::Create(Ordering::COLAMD, sfg);
EXPECT(assert_equal(Ordering(list_of(0)(5)(1)(4)(2)(3)), ordering));
EXPECT(assert_equal(Ordering{0, 5, 1, 4, 2, 3}, ordering));

// - P( 4 2 3)
// | - P( 1 | 2 4)
// | | - P( 5 | 1 4)
// | | | - P( 0 | 1 5)
SymbolicBayesTree expected;
expected.insertRoot(
MakeClique(list_of(4)(2)(3), 3,
list_of(
MakeClique(list_of(1)(2)(4), 1,
list_of(
MakeClique(list_of(5)(1)(4), 1,
list_of(MakeClique(list_of(0)(1)(5), 1))))))));
expected.insertRoot( //
NodeClique(
Keys(4)(2)(3), 3,
Children( //
NodeClique(
Keys(1)(2)(4), 1,
Children( //
NodeClique(Keys(5)(1)(4), 1,
Children( //
LeafClique(Keys(0)(1)(5), 1))))))));

SymbolicBayesTree actual = *sfg.eliminateMultifrontal(ordering);
EXPECT(assert_equal(expected, actual));
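
The ordering assertions here follow the same initializer-list pattern; a tiny sketch (keys illustrative):

  const Ordering expectedOrdering{0, 5, 1, 4, 2, 3};
  // Compare against whatever Ordering::Create(...) produced, e.g.
  // EXPECT(assert_equal(expectedOrdering, ordering));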

@ -730,13 +676,13 @@ TEST(SymbolicBayesTree, COLAMDvsMETIS) {
// METIS
{
Ordering ordering = Ordering::Create(Ordering::METIS, sfg);
// Linux and Mac split differently when using mettis
// Linux and Mac split differently when using Metis
#if defined(__APPLE__)
EXPECT(assert_equal(Ordering(list_of(5)(4)(2)(1)(0)(3)), ordering));
EXPECT(assert_equal(Ordering{5, 4, 2, 1, 0, 3}, ordering));
#elif defined(_WIN32)
EXPECT(assert_equal(Ordering(list_of(4)(3)(1)(0)(5)(2)), ordering));
EXPECT(assert_equal(Ordering{4, 3, 1, 0, 5, 2}, ordering));
#else
EXPECT(assert_equal(Ordering(list_of(3)(2)(5)(0)(4)(1)), ordering));
EXPECT(assert_equal(Ordering{3, 2, 5, 0, 4, 1}, ordering));
#endif

// - P( 1 0 3)

@ -746,25 +692,25 @@ TEST(SymbolicBayesTree, COLAMDvsMETIS) {
SymbolicBayesTree expected;
#if defined(__APPLE__)
expected.insertRoot(
MakeClique(list_of(1)(0)(3), 3,
list_of(
MakeClique(list_of(4)(0)(3), 1,
list_of(MakeClique(list_of(5)(0)(4), 1))))(
MakeClique(list_of(2)(1)(3), 1))));
NodeClique(Keys(1)(0)(3), 3,
Children( //
NodeClique(Keys(4)(0)(3), 1, //
{LeafClique(Keys(5)(0)(4), 1)}))(
LeafClique(Keys(2)(1)(3), 1))));
#elif defined(_WIN32)
expected.insertRoot(
MakeClique(list_of(3)(5)(2), 3,
list_of(
MakeClique(list_of(4)(3)(5), 1,
list_of(MakeClique(list_of(0)(2)(5), 1))))(
MakeClique(list_of(1)(0)(2), 1))));
NodeClique(Keys(3)(5)(2), 3,
Children( //
NodeClique(Keys(4)(3)(5), 1, //
{LeafClique(Keys(0)(2)(5), 1)}))(
LeafClique(Keys(1)(0)(2), 1))));
#else
expected.insertRoot(
MakeClique(list_of(2)(4)(1), 3,
list_of(
MakeClique(list_of(0)(1)(4), 1,
list_of(MakeClique(list_of(5)(0)(4), 1))))(
MakeClique(list_of(3)(2)(4), 1))));
NodeClique(Keys(2)(4)(1), 3,
Children( //
NodeClique(Keys(0)(1)(4), 1, //
{LeafClique(Keys(5)(0)(4), 1)}))(
LeafClique(Keys(3)(2)(4), 1))));
#endif
SymbolicBayesTree actual = *sfg.eliminateMultifrontal(ordering);
EXPECT(assert_equal(expected, actual));

@ -778,4 +724,3 @@ int main() {
return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */

@ -15,8 +15,6 @@
* @author Frank Dellaert
*/

#include <boost/assign/list_of.hpp>
using namespace boost::assign;
#include <boost/make_shared.hpp>

#include <CppUnitLite/TestHarness.h>

@ -69,8 +67,7 @@ TEST( SymbolicConditional, threeParents )
/* ************************************************************************* */
TEST( SymbolicConditional, fourParents )
{
SymbolicConditional c0 = SymbolicConditional::FromKeys(
list_of(0)(1)(2)(3)(4), 1);
auto c0 = SymbolicConditional::FromKeys(KeyVector{0, 1, 2, 3, 4}, 1);
LONGS_EQUAL(1, (long)c0.nrFrontals());
LONGS_EQUAL(4, (long)c0.nrParents());
}

@ -78,9 +75,8 @@ TEST( SymbolicConditional, fourParents )
/* ************************************************************************* */
TEST( SymbolicConditional, FromRange )
{
SymbolicConditional::shared_ptr c0 =
boost::make_shared<SymbolicConditional>(
SymbolicConditional::FromKeys(list_of(1)(2)(3)(4)(5), 2));
auto c0 = boost::make_shared<SymbolicConditional>(
SymbolicConditional::FromKeys(KeyVector{1, 2, 3, 4, 5}, 2));
LONGS_EQUAL(2, (long)c0->nrFrontals());
LONGS_EQUAL(3, (long)c0->nrParents());
}
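
A one-line sketch of the KeyVector-based FromKeys call used above (keys illustrative): the first nrFrontals keys become frontal variables, the remaining keys become parents.

  auto c = SymbolicConditional::FromKeys(KeyVector{0, 1, 2}, 1);  // P(0 | 1, 2)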

@ -17,47 +17,48 @@
*/

#include <CppUnitLite/TestHarness.h>

#include <vector>
#include <boost/make_shared.hpp>
#include <boost/assign/list_of.hpp>

#include <gtsam/base/TestableAssertions.h>
#include <gtsam/symbolic/SymbolicEliminationTree.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/symbolic/SymbolicEliminationTree.h>

#include <boost/make_shared.hpp>
#include <vector>

#include "symbolicExampleGraphs.h"

using namespace gtsam;
using namespace gtsam::symbol_shorthand;
using namespace std;
using boost::assign::list_of;
using sharedNode = SymbolicEliminationTree::sharedNode;

// Use list_of replacement defined in symbolicExampleGraphs.h
using ChildNodes = ChainedVector<sharedNode>;

class EliminationTreeTester {
public:
public:
// build hardcoded tree
static SymbolicEliminationTree buildHardcodedTree(const SymbolicFactorGraph& fg) {

SymbolicEliminationTree::sharedNode leaf0(new SymbolicEliminationTree::Node);
static SymbolicEliminationTree buildHardcodedTree(
const SymbolicFactorGraph& fg) {
sharedNode leaf0(new SymbolicEliminationTree::Node);
leaf0->key = 0;
leaf0->factors.push_back(fg[0]);
leaf0->factors.push_back(fg[1]);

SymbolicEliminationTree::sharedNode node1(new SymbolicEliminationTree::Node);
sharedNode node1(new SymbolicEliminationTree::Node);
node1->key = 1;
node1->factors.push_back(fg[2]);
node1->children.push_back(leaf0);

SymbolicEliminationTree::sharedNode node2(new SymbolicEliminationTree::Node);
sharedNode node2(new SymbolicEliminationTree::Node);
node2->key = 2;
node2->factors.push_back(fg[3]);
node2->children.push_back(node1);

SymbolicEliminationTree::sharedNode leaf3(new SymbolicEliminationTree::Node);
sharedNode leaf3(new SymbolicEliminationTree::Node);
leaf3->key = 3;
leaf3->factors.push_back(fg[4]);

SymbolicEliminationTree::sharedNode root(new SymbolicEliminationTree::Node);
sharedNode root(new SymbolicEliminationTree::Node);
root->key = 4;
root->children.push_back(leaf3);
root->children.push_back(node2);
@ -67,85 +68,100 @@ public:
|
|||
return tree;
|
||||
}
|
||||
|
||||
template<typename ROOTS>
|
||||
static SymbolicEliminationTree MakeTree(const ROOTS& roots)
|
||||
{
|
||||
static SymbolicEliminationTree MakeTree(const ChildNodes::Result& roots) {
|
||||
SymbolicEliminationTree et;
|
||||
et.roots_.assign(roots.begin(), roots.end());
|
||||
return et;
|
||||
}
|
||||
};
|
||||
|
||||
template<typename FACTORS>
|
||||
static SymbolicEliminationTree::sharedNode MakeNode(Key key, const FACTORS& factors)
|
||||
{
|
||||
SymbolicEliminationTree::sharedNode node = boost::make_shared<SymbolicEliminationTree::Node>();
|
||||
// Create a leaf node.
|
||||
static sharedNode Leaf(Key key, const SymbolicFactorGraph& factors) {
|
||||
sharedNode node(new SymbolicEliminationTree::Node());
|
||||
node->key = key;
|
||||
SymbolicFactorGraph factorsAsGraph = factors;
|
||||
node->factors.assign(factorsAsGraph.begin(), factorsAsGraph.end());
|
||||
node->factors.assign(factors.begin(), factors.end());
|
||||
return node;
|
||||
}
|
||||
|
||||
template<typename FACTORS, typename CHILDREN>
|
||||
static SymbolicEliminationTree::sharedNode MakeNode(Key key, const FACTORS& factors, const CHILDREN& children)
|
||||
{
|
||||
SymbolicEliminationTree::sharedNode node = boost::make_shared<SymbolicEliminationTree::Node>();
|
||||
node->key = key;
|
||||
SymbolicFactorGraph factorsAsGraph = factors;
|
||||
node->factors.assign(factorsAsGraph.begin(), factorsAsGraph.end());
|
||||
// Create a node with children.
|
||||
static sharedNode Node(Key key, const SymbolicFactorGraph& factors,
|
||||
const ChildNodes::Result& children) {
|
||||
sharedNode node = Leaf(key, factors);
|
||||
node->children.assign(children.begin(), children.end());
|
||||
return node;
|
||||
}
|
||||
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(EliminationTree, Create)
|
||||
{
|
||||
TEST(EliminationTree, Create) {
|
||||
SymbolicEliminationTree expected =
|
||||
EliminationTreeTester::buildHardcodedTree(simpleTestGraph1);
|
||||
EliminationTreeTester::buildHardcodedTree(simpleTestGraph1);
|
||||
|
||||
// Build from factor graph
|
||||
Ordering order;
|
||||
order += 0,1,2,3,4;
|
||||
const Ordering order{0, 1, 2, 3, 4};
|
||||
SymbolicEliminationTree actual(simpleTestGraph1, order);
|
||||
|
||||
CHECK(assert_equal(expected, actual));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(EliminationTree, Create2)
|
||||
{
|
||||
TEST(EliminationTree, Create2) {
|
||||
// l1 l2
|
||||
// / | / |
|
||||
// x1 --- x2 --- x3 --- x4 --- x5
|
||||
// \ |
|
||||
// l3
|
||||
SymbolicFactorGraph graph;
|
||||
graph += SymbolicFactor(X(1), L(1));
|
||||
graph += SymbolicFactor(X(1), X(2));
|
||||
graph += SymbolicFactor(X(2), L(1));
|
||||
graph += SymbolicFactor(X(2), X(3));
|
||||
graph += SymbolicFactor(X(3), X(4));
|
||||
graph += SymbolicFactor(X(4), L(2));
|
||||
graph += SymbolicFactor(X(4), X(5));
|
||||
graph += SymbolicFactor(L(2), X(5));
|
||||
graph += SymbolicFactor(X(4), L(3));
|
||||
graph += SymbolicFactor(X(5), L(3));
|
||||
auto binary = [](Key j1, Key j2) -> SymbolicFactor {
|
||||
return SymbolicFactor(j1, j2);
|
||||
};
|
||||
graph += binary(X(1), L(1));
|
||||
graph += binary(X(1), X(2));
|
||||
graph += binary(X(2), L(1));
|
||||
graph += binary(X(2), X(3));
|
||||
graph += binary(X(3), X(4));
|
||||
graph += binary(X(4), L(2));
|
||||
graph += binary(X(4), X(5));
|
||||
graph += binary(L(2), X(5));
|
||||
graph += binary(X(4), L(3));
|
||||
graph += binary(X(5), L(3));
|
||||
|
||||
SymbolicEliminationTree expected = EliminationTreeTester::MakeTree(list_of
|
||||
(MakeNode(X(3), SymbolicFactorGraph(), list_of
|
||||
(MakeNode(X(2), list_of(SymbolicFactor(X(2), X(3))), list_of
|
||||
(MakeNode(L(1), list_of(SymbolicFactor(X(2), L(1))), list_of
|
||||
(MakeNode(X(1), list_of(SymbolicFactor(X(1), L(1))) (SymbolicFactor(X(1), X(2)))))))))
|
||||
(MakeNode(X(4), list_of(SymbolicFactor(X(3), X(4))), list_of
|
||||
(MakeNode(L(2), list_of(SymbolicFactor(X(4), L(2))), list_of
|
||||
(MakeNode(X(5), list_of(SymbolicFactor(X(4), X(5))) (SymbolicFactor(L(2), X(5))), list_of
|
||||
(MakeNode(L(3), list_of(SymbolicFactor(X(4), L(3))) (SymbolicFactor(X(5), L(3))))))))))))));
|
||||
|
||||
Ordering order = list_of(X(1)) (L(3)) (L(1)) (X(5)) (X(2)) (L(2)) (X(4)) (X(3));
|
||||
SymbolicEliminationTree expected = EliminationTreeTester::MakeTree( //
|
||||
ChildNodes( //
|
||||
Node(X(3), SymbolicFactorGraph(),
|
||||
ChildNodes( //
|
||||
Node(X(2), SymbolicFactorGraph(binary(X(2), X(3))),
|
||||
ChildNodes( //
|
||||
Node(L(1), SymbolicFactorGraph(binary(X(2), L(1))),
|
||||
ChildNodes( //
|
||||
Leaf(X(1), SymbolicFactorGraph(
|
||||
binary(X(1), L(1)))(
|
||||
binary(X(1), X(2)))))))))(
|
||||
Node(X(4), SymbolicFactorGraph(binary(X(3), X(4))),
|
||||
ChildNodes( //
|
||||
Node(L(2), SymbolicFactorGraph(binary(X(4), L(2))),
|
||||
ChildNodes( //
|
||||
Node(X(5),
|
||||
SymbolicFactorGraph(binary(
|
||||
X(4), X(5)))(binary(L(2), X(5))),
|
||||
ChildNodes( //
|
||||
Leaf(L(3),
|
||||
SymbolicFactorGraph(
|
||||
binary(X(4), L(3)))(
|
||||
binary(X(5), L(3))) //
|
||||
) //
|
||||
) //
|
||||
) //
|
||||
) //
|
||||
) //
|
||||
) //
|
||||
) //
|
||||
) //
|
||||
) //
|
||||
) //
|
||||
);
|
||||
|
||||
const Ordering order{X(1), L(3), L(1), X(5), X(2), L(2), X(4), X(3)};
|
||||
SymbolicEliminationTree actual(graph, order);
|
||||
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
|
||||
|
|
|
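The ChildNodes/ChainedVector helper referenced above keeps a list_of-like chained call style while building a plain vector of children. The real helper lives in symbolicExampleGraphs.h and may differ; the following is only a hedged sketch of one way such a chained builder can be written:

// Hypothetical ChainedVector-style builder: each operator() appends and returns *this.
#include <cassert>
#include <vector>

template <typename T>
class ChainedBuilder {
 public:
  using Result = std::vector<T>;
  explicit ChainedBuilder(const T& first) { result_.push_back(first); }
  ChainedBuilder& operator()(const T& next) {
    result_.push_back(next);
    return *this;
  }
  const Result& result() const { return result_; }

 private:
  Result result_;
};

int main() {
  // Copy out the accumulated vector before the temporary builder goes away.
  const std::vector<int> roots = ChainedBuilder<int>(1)(2)(3).result();
  assert(roots.size() == 3 && roots[2] == 3);
  return 0;
}
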
@@ -21,14 +21,11 @@
#include <gtsam/symbolic/SymbolicConditional.h>
#include <gtsam/symbolic/SymbolicFactorGraph.h>

#include <boost/assign/std/vector.hpp>
#include <boost/assign/list_of.hpp>
#include <boost/make_shared.hpp>
#include <boost/tuple/tuple.hpp>

using namespace std;
using namespace gtsam;
using namespace boost::assign;

/* ************************************************************************* */
#ifdef TRACK_ELIMINATE

@@ -70,19 +67,19 @@ TEST(SymbolicFactor, Constructors)
/* ************************************************************************* */
TEST(SymbolicFactor, EliminateSymbolic)
{
const SymbolicFactorGraph factors = list_of
(SymbolicFactor(2,4,6))
(SymbolicFactor(1,2,5))
(SymbolicFactor(0,3));
const SymbolicFactorGraph factors = {
boost::make_shared<SymbolicFactor>(2, 4, 6),
boost::make_shared<SymbolicFactor>(1, 2, 5),
boost::make_shared<SymbolicFactor>(0, 3)};

const SymbolicFactor expectedFactor(4,5,6);
const SymbolicConditional expectedConditional =
SymbolicConditional::FromKeys(list_of(0)(1)(2)(3)(4)(5)(6), 4);
SymbolicConditional::FromKeys(KeyVector{0,1,2,3,4,5,6}, 4);

SymbolicFactor::shared_ptr actualFactor;
SymbolicConditional::shared_ptr actualConditional;
boost::tie(actualConditional, actualFactor) =
EliminateSymbolic(factors, list_of(0)(1)(2)(3));
EliminateSymbolic(factors, Ordering{0, 1, 2, 3});

CHECK(assert_equal(expectedConditional, *actualConditional));
CHECK(assert_equal(expectedFactor, *actualFactor));

@@ -15,34 +15,29 @@
* @author Christian Potthast
**/

#include <gtsam/symbolic/SymbolicFactorGraph.h>

#include <CppUnitLite/TestHarness.h>
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/symbolic/SymbolicBayesNet.h>
#include <gtsam/symbolic/SymbolicBayesTree.h>
#include <gtsam/symbolic/SymbolicConditional.h>
#include <gtsam/symbolic/SymbolicFactorGraph.h>
#include <gtsam/symbolic/tests/symbolicExampleGraphs.h>

#include <CppUnitLite/TestHarness.h>

#include <boost/assign/std/set.hpp>

using namespace std;
using namespace gtsam;
using namespace boost::assign;

/* ************************************************************************* */
TEST(SymbolicFactorGraph, keys1) {
KeySet expected;
expected += 0, 1, 2, 3, 4;
KeySet expected{0, 1, 2, 3, 4};
KeySet actual = simpleTestGraph1.keys();
EXPECT(expected == actual);
}

/* ************************************************************************* */
TEST(SymbolicFactorGraph, keys2) {
KeySet expected;
expected += 0, 1, 2, 3, 4, 5;
KeySet expected{0, 1, 2, 3, 4, 5};
KeySet actual = simpleTestGraph2.keys();
EXPECT(expected == actual);
}

@@ -50,8 +45,7 @@ TEST(SymbolicFactorGraph, keys2) {
/* ************************************************************************* */
TEST(SymbolicFactorGraph, eliminateFullSequential) {
// Test with simpleTestGraph1
Ordering order;
order += 0, 1, 2, 3, 4;
Ordering order{0, 1, 2, 3, 4};
SymbolicBayesNet actual1 = *simpleTestGraph1.eliminateSequential(order);
EXPECT(assert_equal(simpleTestGraph1BayesNet, actual1));

@@ -63,18 +57,18 @@ TEST(SymbolicFactorGraph, eliminateFullSequential) {
/* ************************************************************************* */
TEST(SymbolicFactorGraph, eliminatePartialSequential) {
// Eliminate 0 and 1
const Ordering order = list_of(0)(1);
const Ordering order{0, 1};

const SymbolicBayesNet expectedBayesNet =
list_of(SymbolicConditional(0, 1, 2))(SymbolicConditional(1, 2, 3, 4));
const auto expectedBayesNet = SymbolicBayesNet(SymbolicConditional(0, 1, 2))(
SymbolicConditional(1, 2, 3, 4));

const SymbolicFactorGraph expectedSfg = list_of(SymbolicFactor(2, 3))(
const auto expectedSfg = SymbolicFactorGraph(SymbolicFactor(2, 3))(
SymbolicFactor(4, 5))(SymbolicFactor(2, 3, 4));

SymbolicBayesNet::shared_ptr actualBayesNet;
SymbolicFactorGraph::shared_ptr actualSfg;
boost::tie(actualBayesNet, actualSfg) =
simpleTestGraph2.eliminatePartialSequential(Ordering(list_of(0)(1)));
simpleTestGraph2.eliminatePartialSequential(Ordering{0, 1});

EXPECT(assert_equal(expectedSfg, *actualSfg));
EXPECT(assert_equal(expectedBayesNet, *actualBayesNet));

@@ -82,8 +76,7 @@ TEST(SymbolicFactorGraph, eliminatePartialSequential) {
SymbolicBayesNet::shared_ptr actualBayesNet2;
SymbolicFactorGraph::shared_ptr actualSfg2;
boost::tie(actualBayesNet2, actualSfg2) =
simpleTestGraph2.eliminatePartialSequential(
list_of(0)(1).convert_to_container<KeyVector>());
simpleTestGraph2.eliminatePartialSequential(Ordering{0, 1});

EXPECT(assert_equal(expectedSfg, *actualSfg2));
EXPECT(assert_equal(expectedBayesNet, *actualBayesNet2));

@@ -105,18 +98,18 @@ TEST(SymbolicFactorGraph, eliminatePartialMultifrontal) {
SymbolicBayesTree expectedBayesTree;
SymbolicConditional::shared_ptr root =
boost::make_shared<SymbolicConditional>(
SymbolicConditional::FromKeys(list_of(4)(5)(1), 2));
SymbolicConditional::FromKeys(KeyVector{4, 5, 1}, 2));
expectedBayesTree.insertRoot(
boost::make_shared<SymbolicBayesTreeClique>(root));

SymbolicFactorGraph expectedFactorGraph =
list_of(SymbolicFactor(0, 1))(SymbolicFactor(0, 2))(SymbolicFactor(1, 3))(
SymbolicFactor(2, 3))(SymbolicFactor(1));
const auto expectedFactorGraph =
SymbolicFactorGraph(SymbolicFactor(0, 1))(SymbolicFactor(0, 2))(
SymbolicFactor(1, 3))(SymbolicFactor(2, 3))(SymbolicFactor(1));

SymbolicBayesTree::shared_ptr actualBayesTree;
SymbolicFactorGraph::shared_ptr actualFactorGraph;
boost::tie(actualBayesTree, actualFactorGraph) =
simpleTestGraph2.eliminatePartialMultifrontal(Ordering(list_of(4)(5)));
simpleTestGraph2.eliminatePartialMultifrontal(Ordering{4, 5});

EXPECT(assert_equal(expectedFactorGraph, *actualFactorGraph));
EXPECT(assert_equal(expectedBayesTree, *actualBayesTree));

@@ -132,8 +125,7 @@ TEST(SymbolicFactorGraph, eliminatePartialMultifrontal) {
SymbolicBayesTree::shared_ptr actualBayesTree2;
SymbolicFactorGraph::shared_ptr actualFactorGraph2;
boost::tie(actualBayesTree2, actualFactorGraph2) =
simpleTestGraph2.eliminatePartialMultifrontal(
list_of<Key>(4)(5).convert_to_container<KeyVector>());
simpleTestGraph2.eliminatePartialMultifrontal(KeyVector{4, 5});

EXPECT(assert_equal(expectedFactorGraph, *actualFactorGraph2));
EXPECT(assert_equal(expectedBayesTree2, *actualBayesTree2));

@@ -141,12 +133,12 @@ TEST(SymbolicFactorGraph, eliminatePartialMultifrontal) {

/* ************************************************************************* */
TEST(SymbolicFactorGraph, marginalMultifrontalBayesNet) {
SymbolicBayesNet expectedBayesNet =
list_of(SymbolicConditional(0, 1, 2))(SymbolicConditional(1, 2, 3))(
SymbolicConditional(2, 3))(SymbolicConditional(3));
auto expectedBayesNet =
SymbolicBayesNet(SymbolicConditional(0, 1, 2))(SymbolicConditional(
1, 2, 3))(SymbolicConditional(2, 3))(SymbolicConditional(3));

SymbolicBayesNet actual1 = *simpleTestGraph2.marginalMultifrontalBayesNet(
Ordering(list_of(0)(1)(2)(3)));
SymbolicBayesNet actual1 =
*simpleTestGraph2.marginalMultifrontalBayesNet(Ordering{0, 1, 2, 3});
EXPECT(assert_equal(expectedBayesNet, actual1));
}

@@ -184,7 +176,7 @@ TEST(SymbolicFactorGraph, marginals) {
fg.push_factor(3, 4);

// eliminate
Ordering ord(list_of(3)(4)(2)(1)(0));
Ordering ord{3, 4, 2, 1, 0};
auto actual = fg.eliminateSequential(ord);
SymbolicBayesNet expected;
expected.emplace_shared<SymbolicConditional>(3, 4);

@@ -196,7 +188,7 @@ TEST(SymbolicFactorGraph, marginals) {

{
// jointBayesNet
Ordering ord(list_of(0)(4)(3));
Ordering ord{0, 4, 3};
auto actual = fg.eliminatePartialSequential(ord);
SymbolicBayesNet expectedBN;
expectedBN.emplace_shared<SymbolicConditional>(0, 1, 2);

@@ -207,7 +199,7 @@ TEST(SymbolicFactorGraph, marginals) {

{
// jointBayesNet
Ordering ord(list_of(0)(2)(3));
Ordering ord{0, 2, 3};
auto actual = fg.eliminatePartialSequential(ord);
SymbolicBayesNet expectedBN;
expectedBN.emplace_shared<SymbolicConditional>(0, 1, 2);

@@ -218,7 +210,7 @@ TEST(SymbolicFactorGraph, marginals) {

{
// conditionalBayesNet
Ordering ord(list_of(0)(2));
Ordering ord{0, 2};
auto actual = fg.eliminatePartialSequential(ord);
SymbolicBayesNet expectedBN;
expectedBN.emplace_shared<SymbolicConditional>(0, 1, 2);

@@ -306,7 +298,7 @@ TEST(SymbolicFactorGraph, add_factors) {
expected.push_factor(1);
expected.push_factor(11);
expected.push_factor(2);
const FactorIndices expectedIndices = list_of(1)(3);
const FactorIndices expectedIndices{1, 3};
const FactorIndices actualIndices = fg1.add_factors(fg2, true);

EXPECT(assert_equal(expected, fg1));

@@ -314,7 +306,7 @@ TEST(SymbolicFactorGraph, add_factors) {

expected.push_factor(1);
expected.push_factor(2);
const FactorIndices expectedIndices2 = list_of(4)(5);
const FactorIndices expectedIndices2{4, 5};
const FactorIndices actualIndices2 = fg1.add_factors(fg2, false);

EXPECT(assert_equal(expected, fg1));

@@ -20,9 +20,6 @@

#include <CppUnitLite/TestHarness.h>

#include <boost/assign/std/list.hpp> // for operator +=
using namespace boost::assign;

using namespace std;
using namespace gtsam;

@@ -86,7 +83,7 @@ TEST( SymbolicISAM, iSAM )
fullGraph += SymbolicFactor(_B_, _S_);

// This ordering is chosen to match the one chosen by COLAMD during the ISAM update
Ordering ordering(list_of(_X_)(_B_)(_S_)(_E_)(_L_)(_T_));
Ordering ordering {_X_, _B_, _S_, _E_, _L_, _T_};
SymbolicBayesTree expected = *fullGraph.eliminateMultifrontal(ordering);

// Add factor on B and S

@@ -23,9 +23,6 @@
#include <gtsam/symbolic/SymbolicEliminationTree.h>
#include <gtsam/symbolic/SymbolicJunctionTree.h>

#include <boost/assign/list_of.hpp>
using namespace boost::assign;

#include "symbolicExampleGraphs.h"

using namespace gtsam;

@@ -43,9 +40,9 @@ TEST( JunctionTree, constructor )
SymbolicJunctionTree actual(SymbolicEliminationTree(simpleChain, order));

SymbolicJunctionTree::Node::Keys
frontal1 = list_of(2)(3),
frontal2 = list_of(0)(1),
sep1, sep2 = list_of(2);
frontal1 {2, 3},
frontal2 {0, 1},
sep1, sep2 {2};
EXPECT(assert_container_equality(frontal1, actual.roots().front()->orderedFrontalKeys));
//EXPECT(assert_equal(sep1, actual.roots().front()->separator));
LONGS_EQUAL(1, (long)actual.roots().front()->factors.size());

@@ -22,10 +22,6 @@

#include <CppUnitLite/TestHarness.h>

#include <boost/assign/std/list.hpp>
#include <boost/assign/list_of.hpp>
using namespace boost::assign;

using namespace std;
using namespace gtsam;

@@ -79,7 +75,7 @@ TEST(VariableIndex, augment2) {

VariableIndex expected(fgCombined);

FactorIndices newIndices = list_of(5)(6)(7)(8);
FactorIndices newIndices {5, 6, 7, 8};
VariableIndex actual(fg1);
actual.augment(fg2, newIndices);

@@ -108,7 +104,7 @@ TEST(VariableIndex, remove) {
vector<size_t> indices;
indices.push_back(0); indices.push_back(1); indices.push_back(2); indices.push_back(3);
actual.remove(indices.begin(), indices.end(), fg1);
std::list<Key> unusedVariables; unusedVariables += 0, 9;
std::list<Key> unusedVariables{0, 9};
actual.removeUnusedVariables(unusedVariables.begin(), unusedVariables.end());

CHECK(assert_equal(expected, actual));

@@ -135,7 +131,7 @@ TEST(VariableIndex, deep_copy) {
vector<size_t> indices;
indices.push_back(0); indices.push_back(1); indices.push_back(2); indices.push_back(3);
clone.remove(indices.begin(), indices.end(), fg1);
std::list<Key> unusedVariables; unusedVariables += 0, 9;
std::list<Key> unusedVariables{0, 9};
clone.removeUnusedVariables(unusedVariables.begin(), unusedVariables.end());

// When modifying the clone, the original should have stayed the same

@@ -17,12 +17,12 @@
*/

#include <boost/shared_ptr.hpp>
#include <boost/assign/std/list.hpp> // for +=
using namespace boost::assign;

#include <CppUnitLite/TestHarness.h>
#include <gtsam_unstable/base/BTree.h>

#include <list>

using namespace std;
using namespace gtsam;

@@ -147,13 +147,12 @@ TEST( BTree, iterating )

// acid iterator test
int sum = 0;
for(const KeyInt& p: tree)
sum += p.second;
for (const KeyInt& p : tree) sum += p.second;
LONGS_EQUAL(15,sum)

// STL iterator test
list<KeyInt> expected, actual;
expected += p1,p2,p3,p4,p5;
auto expected = std::list<KeyInt> {p1, p2, p3, p4, p5};
std::list<KeyInt> actual;
copy (tree.begin(),tree.end(),back_inserter(actual));
CHECK(actual==expected)
}

@@ -181,7 +180,7 @@ TEST( BTree, stress )
tree = tree.add(key, value);
LONGS_EQUAL(i,tree.size())
CHECK(tree.find(key) == value)
expected += make_pair(key, value);
expected.emplace_back(key, value);
}

// Check height is log(N)

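The BTree hunk above swaps boost::assign's operator+= for emplace_back when accumulating expected (key, value) pairs. A small hedged sketch of that change with standard containers:

// Hedged sketch: += make_pair(...) replaced by in-place construction.
#include <cassert>
#include <list>
#include <utility>

int main() {
  std::list<std::pair<int, int>> expected;
  // Before (boost::assign): expected += std::make_pair(1, 10), std::make_pair(2, 20);
  expected.emplace_back(1, 10);
  expected.emplace_back(2, 20);
  assert(expected.size() == 2 && expected.back().second == 20);
  return 0;
}
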
@@ -20,10 +20,6 @@

#include <CppUnitLite/TestHarness.h>

#include <boost/assign/std/list.hpp>
#include <boost/assign/std/set.hpp>
using namespace boost::assign;

#include <iostream>

using namespace std;

@@ -88,7 +84,7 @@ TEST(DSF, makePair) {
/* ************************************************************************* */
TEST(DSF, makeList) {
DSFInt dsf;
list<int> keys; keys += 5, 6, 7;
list<int> keys{5, 6, 7};
dsf = dsf.makeList(keys);
EXPECT(dsf.findSet(5) == dsf.findSet(7));
}

@@ -112,7 +108,7 @@ TEST(DSF, sets) {
map<int, set<int> > sets = dsf.sets();
LONGS_EQUAL(1, sets.size());

set<int> expected; expected += 5, 6;
set<int> expected{5, 6};
EXPECT(expected == sets[dsf.findSet(5)]);
}

@@ -127,7 +123,7 @@ TEST(DSF, sets2) {
map<int, set<int> > sets = dsf.sets();
LONGS_EQUAL(1, sets.size());

set<int> expected; expected += 5, 6, 7;
set<int> expected{5, 6, 7};
EXPECT(expected == sets[dsf.findSet(5)]);
}

@@ -141,7 +137,7 @@ TEST(DSF, sets3) {
map<int, set<int> > sets = dsf.sets();
LONGS_EQUAL(2, sets.size());

set<int> expected; expected += 5, 6;
set<int> expected{5, 6};
EXPECT(expected == sets[dsf.findSet(5)]);
}

@@ -152,11 +148,11 @@ TEST(DSF, partition) {
dsf = dsf.makeSet(6);
dsf = dsf.makeUnion(5,6);

list<int> keys; keys += 5;
list<int> keys{5};
map<int, set<int> > partitions = dsf.partition(keys);
LONGS_EQUAL(1, partitions.size());

set<int> expected; expected += 5;
set<int> expected{5};
EXPECT(expected == partitions[dsf.findSet(5)]);
}

@@ -168,11 +164,11 @@ TEST(DSF, partition2) {
dsf = dsf.makeSet(7);
dsf = dsf.makeUnion(5,6);

list<int> keys; keys += 7;
list<int> keys{7};
map<int, set<int> > partitions = dsf.partition(keys);
LONGS_EQUAL(1, partitions.size());

set<int> expected; expected += 7;
set<int> expected{7};
EXPECT(expected == partitions[dsf.findSet(7)]);
}

@@ -184,11 +180,11 @@ TEST(DSF, partition3) {
dsf = dsf.makeSet(7);
dsf = dsf.makeUnion(5,6);

list<int> keys; keys += 5, 7;
list<int> keys{5, 7};
map<int, set<int> > partitions = dsf.partition(keys);
LONGS_EQUAL(2, partitions.size());

set<int> expected; expected += 5;
set<int> expected{5};
EXPECT(expected == partitions[dsf.findSet(5)]);
}

@@ -202,7 +198,7 @@ TEST(DSF, set) {
set<int> set = dsf.set(5);
LONGS_EQUAL(2, set.size());

std::set<int> expected; expected += 5, 6;
std::set<int> expected{5, 6};
EXPECT(expected == set);
}

@@ -217,7 +213,7 @@ TEST(DSF, set2) {
set<int> set = dsf.set(5);
LONGS_EQUAL(3, set.size());

std::set<int> expected; expected += 5, 6, 7;
std::set<int> expected{5, 6, 7};
EXPECT(expected == set);
}

@@ -261,7 +257,7 @@ TEST(DSF, flatten) {
/* ************************************************************************* */
TEST(DSF, flatten2) {
static string x1("x1"), x2("x2"), x3("x3"), x4("x4");
list<string> keys; keys += x1,x2,x3,x4;
list<string> keys{x1, x2, x3, x4};
DSF<string> dsf(keys);
dsf = dsf.makeUnion(x1,x2);
dsf = dsf.makeUnion(x3,x4);

@@ -285,13 +281,12 @@ TEST(DSF, mergePairwiseMatches) {
Measurement m22(2,2),m23(2,3),m25(2,5),m26(2,6); // in image 2

// Add them all
list<Measurement> measurements;
measurements += m11,m12,m14, m22,m23,m25,m26;
list<Measurement> measurements{m11, m12, m14, m22, m23, m25, m26};

// Create some "matches"
typedef pair<Measurement,Measurement> Match;
list<Match> matches;
matches += Match(m11,m22), Match(m12,m23), Match(m14,m25), Match(m14,m26);
list<Match> matches{Match(m11, m22), Match(m12, m23), Match(m14, m25),
Match(m14, m26)};

// Merge matches
DSF<Measurement> dsf(measurements);

@@ -310,15 +305,15 @@ TEST(DSF, mergePairwiseMatches) {
// Check that we have three connected components
EXPECT_LONGS_EQUAL(3, dsf.numSets());

set<Measurement> expected1; expected1 += m11,m22;
set<Measurement> expected1{m11,m22};
set<Measurement> actual1 = dsf.set(m11);
EXPECT(expected1 == actual1);

set<Measurement> expected2; expected2 += m12,m23;
set<Measurement> expected2{m12,m23};
set<Measurement> actual2 = dsf.set(m12);
EXPECT(expected2 == actual2);

set<Measurement> expected3; expected3 += m14,m25,m26;
set<Measurement> expected3{m14,m25,m26};
set<Measurement> actual3 = dsf.set(m14);
EXPECT(expected3 == actual3);
}

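The DSF tests above apply the same conversion to std::list and std::set, including lists of pairs built with nested braces. A brief sketch under the same standard-library assumptions:

// Hedged sketch: brace-initialized list/set replacing boost::assign's operator+=.
#include <cassert>
#include <list>
#include <set>
#include <utility>

int main() {
  // Before: std::list<int> keys; keys += 5, 6, 7;
  const std::list<int> keys{5, 6, 7};
  // Before: std::set<int> expected; expected += 5, 6;
  const std::set<int> expected{5, 6};
  // Pairs can be written with nested braces instead of Match(a, b) calls.
  const std::list<std::pair<int, int>> matches{{1, 2}, {2, 3}, {4, 5}};
  assert(keys.size() == 3 && expected.count(6) == 1 && matches.front().second == 2);
  return 0;
}
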
@@ -21,7 +21,6 @@
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam_unstable/dllexport.h>

#include <boost/assign.hpp>
#include <boost/format.hpp>
#include <map>

@@ -40,11 +39,10 @@ class GTSAM_UNSTABLE_EXPORT Constraint : public DiscreteFactor {

protected:
/// Construct unary constraint factor.
Constraint(Key j) : DiscreteFactor(boost::assign::cref_list_of<1>(j)) {}
Constraint(Key j) : DiscreteFactor(KeyVector{j}) {}

/// Construct binary constraint factor.
Constraint(Key j1, Key j2)
: DiscreteFactor(boost::assign::cref_list_of<2>(j1)(j2)) {}
Constraint(Key j1, Key j2) : DiscreteFactor(KeyVector{j1, j2}) {}

/// Construct n-way constraint factor.
Constraint(const KeyVector& js) : DiscreteFactor(js) {}

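In the Constraint constructors above, boost::assign::cref_list_of is replaced by passing a brace-initialized KeyVector straight to the base class. A hedged sketch of that constructor-forwarding pattern (FactorBase and the key aliases below are stand-ins, not GTSAM classes):

// Hedged sketch: forward a braced key vector to a base-class constructor.
#include <cassert>
#include <cstdint>
#include <vector>

using Key = std::uint64_t;           // assumption: integer-like keys
using KeyVector = std::vector<Key>;

struct FactorBase {
  explicit FactorBase(const KeyVector& keys) : keys_(keys) {}
  KeyVector keys_;
};

struct UnaryConstraint : FactorBase {
  explicit UnaryConstraint(Key j) : FactorBase(KeyVector{j}) {}
};

struct BinaryConstraint : FactorBase {
  BinaryConstraint(Key j1, Key j2) : FactorBase(KeyVector{j1, j2}) {}
};

int main() {
  assert(UnaryConstraint(7).keys_.size() == 1);
  assert(BinaryConstraint(1, 2).keys_.back() == 2);
  return 0;
}
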
@@ -37,8 +37,7 @@ double Domain::operator()(const DiscreteValues& values) const {

/* ************************************************************************* */
DecisionTreeFactor Domain::toDecisionTreeFactor() const {
DiscreteKeys keys;
keys += DiscreteKey(key(), cardinality_);
const DiscreteKeys keys{DiscreteKey(key(), cardinality_)};
vector<double> table;
for (size_t i1 = 0; i1 < cardinality_; ++i1) table.push_back(contains(i1));
DecisionTreeFactor converted(keys, table);

@@ -29,8 +29,7 @@ double SingleValue::operator()(const DiscreteValues& values) const {

/* ************************************************************************* */
DecisionTreeFactor SingleValue::toDecisionTreeFactor() const {
DiscreteKeys keys;
keys += DiscreteKey(keys_[0], cardinality_);
const DiscreteKeys keys{DiscreteKey(keys_[0], cardinality_)};
vector<double> table;
for (size_t i1 = 0; i1 < cardinality_; i1++) table.push_back(i1 == value_);
DecisionTreeFactor converted(keys, table);

@@ -214,8 +214,7 @@ void sampleSolutions() {
vector<DiscreteBayesNet::shared_ptr> samplers(7);

// Given the time-slots, we can create 7 independent samplers
vector<size_t> slots;
slots += 16, 17, 11, 2, 0, 5, 9; // given slots
vector<size_t> slots{16, 17, 11, 2, 0, 5, 9}; // given slots
for (size_t i = 0; i < 7; i++)
samplers[i] = createSampler(i, slots[i], schedulers);

@@ -296,8 +295,7 @@ void accomodateStudent() {
scheduler.print("scheduler");

// rule out all occupied slots
vector<size_t> slots;
slots += 16, 17, 11, 2, 0, 5, 9, 14;
vector<size_t> slots{16, 17, 11, 2, 0, 5, 9, 14};
vector<double> slotsAvailable(scheduler.nrTimeSlots(), 1.0);
for(size_t s: slots)
slotsAvailable[s] = 0;

@@ -223,8 +223,7 @@ void sampleSolutions() {
vector<DiscreteBayesNet::shared_ptr> samplers(NRSTUDENTS);

// Given the time-slots, we can create NRSTUDENTS independent samplers
vector<size_t> slots;
slots += 3, 20, 2, 6, 5, 11, 1, 4; // given slots
vector<size_t> slots{3, 20, 2, 6, 5, 11, 1, 4}; // given slots
for (size_t i = 0; i < NRSTUDENTS; i++)
samplers[i] = createSampler(i, slots[i], schedulers);

@@ -248,8 +248,7 @@ void sampleSolutions() {
vector<DiscreteBayesNet::shared_ptr> samplers(NRSTUDENTS);

// Given the time-slots, we can create NRSTUDENTS independent samplers
vector<size_t> slots;
slots += 12,11,13, 21,16,1, 3,2,6, 7,22,4; // given slots
vector<size_t> slots{12,11,13, 21,16,1, 3,2,6, 7,22,4}; // given slots
for (size_t i = 0; i < NRSTUDENTS; i++)
samplers[i] = createSampler(i, slots[i], schedulers);

@@ -11,14 +11,12 @@
#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/inference/VariableIndex.h>

#include <boost/assign/list_of.hpp>
#include <boost/range/adaptor/map.hpp>
#include <fstream>
#include <iostream>

using namespace std;
using namespace boost;
using namespace boost::assign;
using namespace gtsam;

/**

@@ -105,7 +103,7 @@ class LoopyBelief {
if (debug) subGraph.print("------- Subgraph:");
DiscreteFactor::shared_ptr message;
boost::tie(dummyCond, message) =
EliminateDiscrete(subGraph, Ordering(list_of(neighbor)));
EliminateDiscrete(subGraph, Ordering{neighbor});
// store the new factor into messages
messages.insert(make_pair(neighbor, message));
if (debug) message->print("------- Message: ");

@@ -230,7 +228,7 @@ TEST_UNSAFE(LoopyBelief, construction) {

// Map from key to DiscreteKey for building belief factor.
// TODO: this is bad!
std::map<Key, DiscreteKey> allKeys = map_list_of(0, C)(1, S)(2, R)(3, W);
std::map<Key, DiscreteKey> allKeys{{0, C}, {1, S}, {2, R}, {3, W}};

// Build graph
DecisionTreeFactor pC(C, "0.5 0.5");

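The LoopyBelief hunk above replaces boost::assign's map_list_of with a brace-initialized std::map of key/value pairs. A small hedged sketch with plain ints standing in for the Key and DiscreteKey types of the test:

// Hedged sketch: map_list_of(0, a)(1, b) becomes {{0, a}, {1, b}}.
#include <cassert>
#include <map>

int main() {
  const std::map<int, int> allKeys{{0, 10}, {1, 11}, {2, 12}, {3, 13}};
  assert(allKeys.size() == 4 && allKeys.at(2) == 12);
  return 0;
}
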
@@ -58,14 +58,14 @@ class Sudoku : public CSP {
// add row constraints
for (size_t i = 0; i < n; i++) {
DiscreteKeys dkeys;
for (size_t j = 0; j < n; j++) dkeys += dkey(i, j);
for (size_t j = 0; j < n; j++) dkeys.push_back(dkey(i, j));
addAllDiff(dkeys);
}

// add col constraints
for (size_t j = 0; j < n; j++) {
DiscreteKeys dkeys;
for (size_t i = 0; i < n; i++) dkeys += dkey(i, j);
for (size_t i = 0; i < n; i++) dkeys.push_back(dkey(i, j));
addAllDiff(dkeys);
}

@@ -77,7 +77,7 @@ class Sudoku : public CSP {
// Box I,J
DiscreteKeys dkeys;
for (size_t i = i0; i < i0 + N; i++)
for (size_t j = j0; j < j0 + N; j++) dkeys += dkey(i, j);
for (size_t j = j0; j < j0 + N; j++) dkeys.push_back(dkey(i, j));
addAllDiff(dkeys);
j0 += N;
}

@@ -15,38 +15,31 @@
* @author Duy-Nguyen Ta
**/

#include <gtsam_unstable/linear/LinearEquality.h>
#include <CppUnitLite/TestHarness.h>
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/linear/HessianFactor.h>
#include <gtsam/linear/VectorValues.h>
#include <CppUnitLite/TestHarness.h>

#include <boost/assign/std/vector.hpp>
#include <boost/assign/list_of.hpp>
#include <gtsam_unstable/linear/LinearEquality.h>

using namespace std;
using namespace gtsam;
using namespace boost::assign;

GTSAM_CONCEPT_TESTABLE_INST (LinearEquality)
GTSAM_CONCEPT_TESTABLE_INST(LinearEquality)

namespace {
namespace simple {
// Terms we'll use
const vector<pair<Key, Matrix> > terms = list_of < pair<Key, Matrix>
> (make_pair(5, Matrix3::Identity()))(
make_pair(10, 2 * Matrix3::Identity()))(
make_pair(15, 3 * Matrix3::Identity()));
const vector<pair<Key, Matrix> > terms{
make_pair(5, I_3x3), make_pair(10, 2 * I_3x3), make_pair(15, 3 * I_3x3)};

// RHS and sigmas
const Vector b = (Vector(3) << 1., 2., 3.).finished();
const SharedDiagonal noise = noiseModel::Constrained::All(3);
}
}
} // namespace simple
} // namespace

/* ************************************************************************* */
TEST(LinearEquality, constructors_and_accessors)
{
TEST(LinearEquality, constructors_and_accessors) {
using namespace simple;

// Test for using different numbers of terms

@@ -66,8 +59,8 @@ TEST(LinearEquality, constructors_and_accessors)
// Two term constructor
LinearEquality expected(
boost::make_iterator_range(terms.begin(), terms.begin() + 2), b, 0);
LinearEquality actual(terms[0].first, terms[0].second,
terms[1].first, terms[1].second, b, 0);
LinearEquality actual(terms[0].first, terms[0].second, terms[1].first,
terms[1].second, b, 0);
EXPECT(assert_equal(expected, actual));
LONGS_EQUAL((long)terms[1].first, (long)actual.keys().back());
EXPECT(assert_equal(terms[1].second, actual.getA(actual.end() - 1)));

@@ -79,8 +72,9 @@ TEST(LinearEquality, constructors_and_accessors)
// Three term constructor
LinearEquality expected(
boost::make_iterator_range(terms.begin(), terms.begin() + 3), b, 0);
LinearEquality actual(terms[0].first, terms[0].second,
terms[1].first, terms[1].second, terms[2].first, terms[2].second, b, 0);
LinearEquality actual(terms[0].first, terms[0].second, terms[1].first,
terms[1].second, terms[2].first, terms[2].second, b,
0);
EXPECT(assert_equal(expected, actual));
LONGS_EQUAL((long)terms[2].first, (long)actual.keys().back());
EXPECT(assert_equal(terms[2].second, actual.getA(actual.end() - 1)));

@@ -92,39 +86,38 @@ TEST(LinearEquality, constructors_and_accessors)

/* ************************************************************************* */
TEST(LinearEquality, Hessian_conversion) {
HessianFactor hessian(0, (Matrix(4,4) <<
1.57, 2.695, -1.1, -2.35,
2.695, 11.3125, -0.65, -10.225,
-1.1, -0.65, 1, 0.5,
-2.35, -10.225, 0.5, 9.25).finished(),
(Vector(4) << -7.885, -28.5175, 2.75, 25.675).finished(),
73.1725);
HessianFactor hessian(
0,
(Matrix(4, 4) << 1.57, 2.695, -1.1, -2.35, 2.695, 11.3125, -0.65, -10.225,
-1.1, -0.65, 1, 0.5, -2.35, -10.225, 0.5, 9.25)
.finished(),
(Vector(4) << -7.885, -28.5175, 2.75, 25.675).finished(), 73.1725);

try {
LinearEquality actual(hessian);
EXPECT(false);
}
catch (const std::runtime_error& exception) {
} catch (const std::runtime_error& exception) {
EXPECT(true);
}
}

/* ************************************************************************* */
TEST(LinearEquality, error)
{
TEST(LinearEquality, error) {
LinearEquality factor(simple::terms, simple::b, 0);

VectorValues values;
values.insert(5, Vector::Constant(3, 1.0));
values.insert(10, Vector::Constant(3, 0.5));
values.insert(15, Vector::Constant(3, 1.0/3.0));
values.insert(15, Vector::Constant(3, 1.0 / 3.0));

Vector expected_unwhitened(3); expected_unwhitened << 2.0, 1.0, 0.0;
Vector expected_unwhitened(3);
expected_unwhitened << 2.0, 1.0, 0.0;
Vector actual_unwhitened = factor.unweighted_error(values);
EXPECT(assert_equal(expected_unwhitened, actual_unwhitened));

// whitened is meaningless in constraints
Vector expected_whitened(3); expected_whitened = expected_unwhitened;
Vector expected_whitened(3);
expected_whitened = expected_unwhitened;
Vector actual_whitened = factor.error_vector(values);
EXPECT(assert_equal(expected_whitened, actual_whitened));

@@ -134,13 +127,13 @@ TEST(LinearEquality, error)
}

/* ************************************************************************* */
TEST(LinearEquality, matrices_NULL)
{
TEST(LinearEquality, matrices_NULL) {
// Make sure everything works with nullptr noise model
LinearEquality factor(simple::terms, simple::b, 0);

Matrix AExpected(3, 9);
AExpected << simple::terms[0].second, simple::terms[1].second, simple::terms[2].second;
AExpected << simple::terms[0].second, simple::terms[1].second,
simple::terms[2].second;
Vector rhsExpected = simple::b;
Matrix augmentedJacobianExpected(3, 10);
augmentedJacobianExpected << AExpected, rhsExpected;

@@ -153,24 +146,25 @@ TEST(LinearEquality, matrices_NULL)
// Unwhitened Jacobian
EXPECT(assert_equal(AExpected, factor.jacobianUnweighted().first));
EXPECT(assert_equal(rhsExpected, factor.jacobianUnweighted().second));
EXPECT(assert_equal(augmentedJacobianExpected, factor.augmentedJacobianUnweighted()));
EXPECT(assert_equal(augmentedJacobianExpected,
factor.augmentedJacobianUnweighted()));
}

/* ************************************************************************* */
TEST(LinearEquality, matrices)
{
TEST(LinearEquality, matrices) {
// And now witgh a non-unit noise model
LinearEquality factor(simple::terms, simple::b, 0);

Matrix jacobianExpected(3, 9);
jacobianExpected << simple::terms[0].second, simple::terms[1].second, simple::terms[2].second;
jacobianExpected << simple::terms[0].second, simple::terms[1].second,
simple::terms[2].second;
Vector rhsExpected = simple::b;
Matrix augmentedJacobianExpected(3, 10);
augmentedJacobianExpected << jacobianExpected, rhsExpected;

Matrix augmentedHessianExpected =
augmentedJacobianExpected.transpose() * simple::noise->R().transpose()
* simple::noise->R() * augmentedJacobianExpected;
augmentedJacobianExpected.transpose() * simple::noise->R().transpose() *
simple::noise->R() * augmentedJacobianExpected;

// Whitened Jacobian
EXPECT(assert_equal(jacobianExpected, factor.jacobian().first));

@@ -180,35 +174,37 @@ TEST(LinearEquality, matrices)
// Unwhitened Jacobian
EXPECT(assert_equal(jacobianExpected, factor.jacobianUnweighted().first));
EXPECT(assert_equal(rhsExpected, factor.jacobianUnweighted().second));
EXPECT(assert_equal(augmentedJacobianExpected, factor.augmentedJacobianUnweighted()));
EXPECT(assert_equal(augmentedJacobianExpected,
factor.augmentedJacobianUnweighted()));
}

/* ************************************************************************* */
TEST(LinearEquality, operators )
{
TEST(LinearEquality, operators) {
Matrix I = I_2x2;
Vector b = (Vector(2) << 0.2,-0.1).finished();
Vector b = (Vector(2) << 0.2, -0.1).finished();
LinearEquality lf(1, -I, 2, I, b, 0);

VectorValues c;
c.insert(1, (Vector(2) << 10.,20.).finished());
c.insert(2, (Vector(2) << 30.,60.).finished());
c.insert(1, (Vector(2) << 10., 20.).finished());
c.insert(2, (Vector(2) << 30., 60.).finished());

// test A*x
Vector expectedE = (Vector(2) << 20.,40.).finished();
Vector expectedE = (Vector(2) << 20., 40.).finished();
Vector actualE = lf * c;
EXPECT(assert_equal(expectedE, actualE));

// test A^e
VectorValues expectedX;
expectedX.insert(1, (Vector(2) << -20.,-40.).finished());
expectedX.insert(1, (Vector(2) << -20., -40.).finished());
expectedX.insert(2, (Vector(2) << 20., 40.).finished());
VectorValues actualX = VectorValues::Zero(expectedX);
lf.transposeMultiplyAdd(1.0, actualE, actualX);
EXPECT(assert_equal(expectedX, actualX));

// test gradient at zero
Matrix A; Vector b2; boost::tie(A,b2) = lf.jacobian();
Matrix A;
Vector b2;
boost::tie(A, b2) = lf.jacobian();
VectorValues expectedG;
expectedG.insert(1, (Vector(2) << 0.2, -0.1).finished());
expectedG.insert(2, (Vector(2) << -0.2, 0.1).finished());

@@ -217,16 +213,14 @@ TEST(LinearEquality, operators )
}

/* ************************************************************************* */
TEST(LinearEquality, default_error )
{
TEST(LinearEquality, default_error) {
LinearEquality f;
double actual = f.error(VectorValues());
DOUBLES_EQUAL(0.0, actual, 1e-15);
}

//* ************************************************************************* */
TEST(LinearEquality, empty )
{
TEST(LinearEquality, empty) {
// create an empty factor
LinearEquality f;
EXPECT(f.empty());

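The simple::terms fixture above changes from a templated list_of expression to a braced vector of (key, matrix) pairs. A hedged sketch of the same shape with scalars standing in for the Eigen matrices:

// Hedged sketch: brace-initialized vector of pairs replacing list_of<pair<...>>.
#include <cassert>
#include <cstdint>
#include <utility>
#include <vector>

int main() {
  using Key = std::uint64_t;  // assumption: integer-like keys
  const std::vector<std::pair<Key, double>> terms{{5, 1.0}, {10, 2.0}, {15, 3.0}};
  assert(terms.size() == 3 && terms[1].second == 2.0);
  return 0;
}
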
@@ -6,10 +6,6 @@
* Description: unit tests for FindSeparator
*/

#include <boost/assign/std/list.hpp> // for operator +=
#include <boost/assign/std/set.hpp> // for operator +=
#include <boost/assign/std/vector.hpp> // for operator +=
using namespace boost::assign;
#include <boost/make_shared.hpp>
#include <CppUnitLite/TestHarness.h>

@@ -30,16 +26,16 @@ TEST ( Partition, separatorPartitionByMetis )
graph.push_back(boost::make_shared<GenericFactor2D>(2, NODE_POSE_2D, 4, NODE_LANDMARK_2D));
graph.push_back(boost::make_shared<GenericFactor2D>(0, NODE_POSE_2D, 1, NODE_POSE_2D));
graph.push_back(boost::make_shared<GenericFactor2D>(1, NODE_POSE_2D, 2, NODE_POSE_2D));
std::vector<size_t> keys; keys += 0, 1, 2, 3, 4;
std::vector<size_t> keys{0, 1, 2, 3, 4};

WorkSpace workspace(5);
boost::optional<MetisResult> actual = separatorPartitionByMetis<GenericGraph2D>(graph, keys,
workspace, true);

CHECK(actual.is_initialized());
vector<size_t> A_expected; A_expected += 0, 3; // frontal
vector<size_t> B_expected; B_expected += 2, 4; // frontal
vector<size_t> C_expected; C_expected += 1; // separator
vector<size_t> A_expected{0, 3}; // frontal
vector<size_t> B_expected{2, 4}; // frontal
vector<size_t> C_expected{1}; // separator
CHECK(A_expected == actual->A);
CHECK(B_expected == actual->B);
CHECK(C_expected == actual->C);

@@ -55,16 +51,16 @@ TEST ( Partition, separatorPartitionByMetis2 )
graph.push_back(boost::make_shared<GenericFactor2D>(3, NODE_POSE_2D, 6, NODE_LANDMARK_2D));
graph.push_back(boost::make_shared<GenericFactor2D>(1, NODE_POSE_2D, 2, NODE_POSE_2D));
graph.push_back(boost::make_shared<GenericFactor2D>(2, NODE_POSE_2D, 3, NODE_POSE_2D));
std::vector<size_t> keys; keys += 1, 2, 3, 5, 6;
std::vector<size_t> keys{1, 2, 3, 5, 6};

WorkSpace workspace(8);
boost::optional<MetisResult> actual = separatorPartitionByMetis<GenericGraph2D>(graph, keys,
workspace, true);

CHECK(actual.is_initialized());
vector<size_t> A_expected; A_expected += 1, 5; // frontal
vector<size_t> B_expected; B_expected += 3, 6; // frontal
vector<size_t> C_expected; C_expected += 2; // separator
vector<size_t> A_expected{1, 5}; // frontal
vector<size_t> B_expected{3, 6}; // frontal
vector<size_t> C_expected{2}; // separator
CHECK(A_expected == actual->A);
CHECK(B_expected == actual->B);
CHECK(C_expected == actual->C);

@@ -78,15 +74,15 @@ TEST ( Partition, edgePartitionByMetis )
graph.push_back(boost::make_shared<GenericFactor3D>(0, 1, 0, NODE_POSE_3D, NODE_POSE_3D));
graph.push_back(boost::make_shared<GenericFactor3D>(1, 2, 1, NODE_POSE_3D, NODE_POSE_3D));
graph.push_back(boost::make_shared<GenericFactor3D>(2, 3, 2, NODE_POSE_3D, NODE_POSE_3D));
std::vector<size_t> keys; keys += 0, 1, 2, 3;
std::vector<size_t> keys{0, 1, 2, 3};

WorkSpace workspace(6);
boost::optional<MetisResult> actual = edgePartitionByMetis<GenericGraph3D>(graph, keys,
workspace, true);

CHECK(actual.is_initialized());
vector<size_t> A_expected; A_expected += 0, 1; // frontal
vector<size_t> B_expected; B_expected += 2, 3; // frontal
vector<size_t> A_expected{0, 1}; // frontal
vector<size_t> B_expected{2, 3}; // frontal
vector<size_t> C_expected; // separator
// for(const size_t a: actual->A)
// cout << a << " ";

@@ -109,14 +105,14 @@ TEST ( Partition, edgePartitionByMetis2 )
graph.push_back(boost::make_shared<GenericFactor3D>(1, 2, 1, NODE_POSE_3D, NODE_POSE_3D, 1));
graph.push_back(boost::make_shared<GenericFactor3D>(2, 3, 2, NODE_POSE_3D, NODE_POSE_3D, 20));
graph.push_back(boost::make_shared<GenericFactor3D>(3, 4, 3, NODE_POSE_3D, NODE_POSE_3D, 1));
std::vector<size_t> keys; keys += 0, 1, 2, 3, 4;
std::vector<size_t> keys{0, 1, 2, 3, 4};

WorkSpace workspace(6);
boost::optional<MetisResult> actual = edgePartitionByMetis<GenericGraph3D>(graph, keys,
workspace, true);
CHECK(actual.is_initialized());
vector<size_t> A_expected; A_expected += 0, 1; // frontal
vector<size_t> B_expected; B_expected += 2, 3, 4; // frontal
vector<size_t> A_expected{0, 1}; // frontal
vector<size_t> B_expected{2, 3, 4}; // frontal
vector<size_t> C_expected; // separator
CHECK(A_expected == actual->A);
CHECK(B_expected == actual->B);

@@ -133,7 +129,7 @@ TEST ( Partition, findSeparator )
graph.push_back(boost::make_shared<GenericFactor2D>(2, NODE_POSE_2D, 4, NODE_LANDMARK_2D));
graph.push_back(boost::make_shared<GenericFactor2D>(0, NODE_POSE_2D, 1, NODE_POSE_2D));
graph.push_back(boost::make_shared<GenericFactor2D>(1, NODE_POSE_2D, 2, NODE_POSE_2D));
std::vector<size_t> keys; keys += 0, 1, 2, 3, 4;
std::vector<size_t> keys{0, 1, 2, 3, 4};

WorkSpace workspace(5);
int minNodesPerMap = -1;

@@ -159,7 +155,7 @@ TEST ( Partition, findSeparator2 )
graph.push_back(boost::make_shared<GenericFactor2D>(3, NODE_POSE_2D, 6, NODE_LANDMARK_2D));
graph.push_back(boost::make_shared<GenericFactor2D>(1, NODE_POSE_2D, 2, NODE_POSE_2D));
graph.push_back(boost::make_shared<GenericFactor2D>(2, NODE_POSE_2D, 3, NODE_POSE_2D));
std::vector<size_t> keys; keys += 1, 2, 3, 5, 6;
std::vector<size_t> keys{1, 2, 3, 5, 6};

WorkSpace workspace(8);
int minNodesPerMap = -1;

@@ -10,10 +10,6 @@

#include <CppUnitLite/TestHarness.h>

#include <boost/assign/std/list.hpp> // for operator +=
#include <boost/assign/std/set.hpp> // for operator +=
#include <boost/assign/std/vector.hpp> // for operator +=
using namespace boost::assign;
#include <boost/make_shared.hpp>

#include <map>

@@ -44,13 +40,13 @@ TEST ( GenerciGraph, findIslands )
graph.push_back(boost::make_shared<GenericFactor2D>(2, NODE_POSE_2D, 3, NODE_POSE_2D));
graph.push_back(boost::make_shared<GenericFactor2D>(4, NODE_POSE_2D, 5, NODE_POSE_2D));
graph.push_back(boost::make_shared<GenericFactor2D>(5, NODE_POSE_2D, 6, NODE_POSE_2D));
std::vector<size_t> keys; keys += 1, 2, 3, 4, 5, 6, 7, 8, 9;
std::vector<size_t> keys{1, 2, 3, 4, 5, 6, 7, 8, 9};

WorkSpace workspace(10); // from 0 to 9
list<vector<size_t> > islands = findIslands(graph, keys, workspace, 7, 2);
LONGS_EQUAL(2, islands.size());
vector<size_t> island1; island1 += 1, 2, 3, 7, 8;
vector<size_t> island2; island2 += 4, 5, 6, 9;
vector<size_t> island1{1, 2, 3, 7, 8};
vector<size_t> island2{4, 5, 6, 9};
CHECK(island1 == islands.front());
CHECK(island2 == islands.back());
}

@@ -77,12 +73,12 @@ TEST( GenerciGraph, findIslands2 )
graph.push_back(boost::make_shared<GenericFactor2D>(2, NODE_POSE_2D, 3, NODE_POSE_2D));
graph.push_back(boost::make_shared<GenericFactor2D>(4, NODE_POSE_2D, 5, NODE_POSE_2D));
graph.push_back(boost::make_shared<GenericFactor2D>(5, NODE_POSE_2D, 6, NODE_POSE_2D));
std::vector<size_t> keys; keys += 1, 2, 3, 4, 5, 6, 7, 8;
std::vector<size_t> keys{1, 2, 3, 4, 5, 6, 7, 8};

WorkSpace workspace(15); // from 0 to 8, but testing over-allocation here
list<vector<size_t> > islands = findIslands(graph, keys, workspace, 7, 2);
LONGS_EQUAL(1, islands.size());
vector<size_t> island1; island1 += 1, 2, 3, 4, 5, 6, 7, 8;
vector<size_t> island1{1, 2, 3, 4, 5, 6, 7, 8};
CHECK(island1 == islands.front());
}

@@ -99,13 +95,13 @@ TEST ( GenerciGraph, findIslands3 )

graph.push_back(boost::make_shared<GenericFactor2D>(2, NODE_POSE_2D, 3, NODE_POSE_2D));
graph.push_back(boost::make_shared<GenericFactor2D>(3, NODE_POSE_2D, 4, NODE_POSE_2D));
std::vector<size_t> keys; keys += 1, 2, 3, 4, 5, 6;
std::vector<size_t> keys{1, 2, 3, 4, 5, 6};

WorkSpace workspace(7); // from 0 to 9
list<vector<size_t> > islands = findIslands(graph, keys, workspace, 7, 2);
LONGS_EQUAL(2, islands.size());
vector<size_t> island1; island1 += 1, 5;
vector<size_t> island2; island2 += 2, 3, 4, 6;
vector<size_t> island1{1, 5};
vector<size_t> island2{2, 3, 4, 6};
CHECK(island1 == islands.front());
CHECK(island2 == islands.back());
}

@@ -119,13 +115,13 @@ TEST ( GenerciGraph, findIslands4 )
GenericGraph2D graph;
graph.push_back(boost::make_shared<GenericFactor2D>(3, NODE_POSE_2D, 4, NODE_LANDMARK_2D));
graph.push_back(boost::make_shared<GenericFactor2D>(7, NODE_POSE_2D, 7, NODE_LANDMARK_2D));
std::vector<size_t> keys; keys += 3, 4, 7;
std::vector<size_t> keys{3, 4, 7};

WorkSpace workspace(8); // from 0 to 7
list<vector<size_t> > islands = findIslands(graph, keys, workspace, 7, 2);
LONGS_EQUAL(2, islands.size());
vector<size_t> island1; island1 += 3, 4;
vector<size_t> island2; island2 += 7;
vector<size_t> island1{3, 4};
vector<size_t> island2{7};
CHECK(island1 == islands.front());
CHECK(island2 == islands.back());
}

@@ -147,13 +143,13 @@ TEST ( GenerciGraph, findIslands5 )
graph.push_back(boost::make_shared<GenericFactor2D>(1, NODE_POSE_2D, 3, NODE_POSE_2D));
graph.push_back(boost::make_shared<GenericFactor2D>(2, NODE_POSE_2D, 4, NODE_POSE_2D));

std::vector<size_t> keys; keys += 1, 2, 3, 4, 5;
std::vector<size_t> keys{1, 2, 3, 4, 5};

WorkSpace workspace(6); // from 0 to 5
list<vector<size_t> > islands = findIslands(graph, keys, workspace, 7, 2);
LONGS_EQUAL(2, islands.size());
vector<size_t> island1; island1 += 1, 3, 5;
vector<size_t> island2; island2 += 2, 4;
vector<size_t> island1{1, 3, 5};
vector<size_t> island2{2, 4};
CHECK(island1 == islands.front());
CHECK(island2 == islands.back());
}

@@ -235,8 +231,8 @@ TEST ( GenericGraph, reduceGenericGraph2 )
/* ************************************************************************* */
TEST ( GenerciGraph, hasCommonCamera )
{
std::set<size_t> cameras1; cameras1 += 1, 2, 3, 4, 5;
std::set<size_t> cameras2; cameras2 += 8, 7, 6, 5;
std::set<size_t> cameras1{1, 2, 3, 4, 5};
std::set<size_t> cameras2{8, 7, 6, 5};
bool actual = hasCommonCamera(cameras1, cameras2);
CHECK(actual);
}

@@ -244,8 +240,8 @@ TEST ( GenerciGraph, hasCommonCamera )
/* ************************************************************************* */
TEST ( GenerciGraph, hasCommonCamera2 )
{
std::set<size_t> cameras1; cameras1 += 1, 3, 5, 7;
std::set<size_t> cameras2; cameras2 += 2, 4, 6, 8, 10;
std::set<size_t> cameras1{1, 3, 5, 7};
std::set<size_t> cameras2{2, 4, 6, 8, 10};
bool actual = hasCommonCamera(cameras1, cameras2);
CHECK(!actual);
}

@@ -6,10 +6,6 @@
* Description: unit tests for NestedDissection
*/

#include <boost/assign/std/list.hpp> // for operator +=
#include <boost/assign/std/set.hpp> // for operator +=
#include <boost/assign/std/vector.hpp> // for operator +=
using namespace boost::assign;
#include <boost/make_shared.hpp>
#include <CppUnitLite/TestHarness.h>
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue