Merge pull request #1311 from borglab/hybrid/switching-0-index
commit 7f8bd8a511
|
@@ -134,26 +134,26 @@ struct Switching {
|
||||||
Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1,
|
Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1,
|
||||||
std::vector<double> measurements = {})
|
std::vector<double> measurements = {})
|
||||||
: K(K) {
|
: K(K) {
|
||||||
// Create DiscreteKeys for binary K modes, modes[0] will not be used.
|
// Create DiscreteKeys for K binary modes.
|
||||||
for (size_t k = 0; k <= K; k++) {
|
for (size_t k = 0; k < K; k++) {
|
||||||
modes.emplace_back(M(k), 2);
|
modes.emplace_back(M(k), 2);
|
||||||
}
|
}
|
||||||
|
|
||||||
// If measurements are not provided, we just have the robot moving forward.
|
// If measurements are not provided, we just have the robot moving forward.
|
||||||
if (measurements.size() == 0) {
|
if (measurements.size() == 0) {
|
||||||
for (size_t k = 1; k <= K; k++) {
|
for (size_t k = 0; k < K; k++) {
|
||||||
measurements.push_back(k - 1);
|
measurements.push_back(k);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create hybrid factor graph.
|
// Create hybrid factor graph.
|
||||||
// Add a prior on X(1).
|
// Add a prior on X(0).
|
||||||
auto prior = boost::make_shared<PriorFactor<double>>(
|
auto prior = boost::make_shared<PriorFactor<double>>(
|
||||||
X(1), measurements.at(0), noiseModel::Isotropic::Sigma(1, prior_sigma));
|
X(0), measurements.at(0), noiseModel::Isotropic::Sigma(1, prior_sigma));
|
||||||
nonlinearFactorGraph.push_nonlinear(prior);
|
nonlinearFactorGraph.push_nonlinear(prior);
|
||||||
|
|
||||||
// Add "motion models".
|
// Add "motion models".
|
||||||
for (size_t k = 1; k < K; k++) {
|
for (size_t k = 0; k < K - 1; k++) {
|
||||||
KeyVector keys = {X(k), X(k + 1)};
|
KeyVector keys = {X(k), X(k + 1)};
|
||||||
auto motion_models = motionModels(k, between_sigma);
|
auto motion_models = motionModels(k, between_sigma);
|
||||||
std::vector<NonlinearFactor::shared_ptr> components;
|
std::vector<NonlinearFactor::shared_ptr> components;
|
||||||
|
@@ -166,17 +166,17 @@ struct Switching {
|
||||||
|
|
||||||
// Add measurement factors
|
// Add measurement factors
|
||||||
auto measurement_noise = noiseModel::Isotropic::Sigma(1, prior_sigma);
|
auto measurement_noise = noiseModel::Isotropic::Sigma(1, prior_sigma);
|
||||||
for (size_t k = 2; k <= K; k++) {
|
for (size_t k = 1; k < K; k++) {
|
||||||
nonlinearFactorGraph.emplace_nonlinear<PriorFactor<double>>(
|
nonlinearFactorGraph.emplace_nonlinear<PriorFactor<double>>(
|
||||||
X(k), measurements.at(k - 1), measurement_noise);
|
X(k), measurements.at(k), measurement_noise);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add "mode chain"
|
// Add "mode chain"
|
||||||
addModeChain(&nonlinearFactorGraph);
|
addModeChain(&nonlinearFactorGraph);
|
||||||
|
|
||||||
// Create the linearization point.
|
// Create the linearization point.
|
||||||
for (size_t k = 1; k <= K; k++) {
|
for (size_t k = 0; k < K; k++) {
|
||||||
linearizationPoint.insert<double>(X(k), static_cast<double>(k));
|
linearizationPoint.insert<double>(X(k), static_cast<double>(k + 1));
|
||||||
}
|
}
|
||||||
|
|
||||||
// The ground truth is robot moving forward
|
// The ground truth is the robot moving forward
|
||||||
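For orientation, a minimal sketch of the layout the re-indexed fixture produces, assuming K = 3 and the constructor shown above (Switching is the test fixture defined in this header):

// Sketch only: 0-based layout created by Switching(3).
// Continuous keys X(0), X(1), X(2); discrete keys M(0), M(1), M(2) are created,
// and the motion models / mode chain use M(0) and M(1).
// nonlinearFactorGraph (and its linearized counterpart) is ordered as:
//   0: prior on X(0)
//   1: motion model X(0) -- X(1), switched on M(0)
//   2: motion model X(1) -- X(2), switched on M(1)
//   3: measurement on X(1)
//   4: measurement on X(2)
//   5: P(M(0))
//   6: P(M(1) | M(0))
// Default measurements are X(k) = k; the linearization point is X(k) = k + 1.
Switching s(3);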
|
@@ -195,11 +195,16 @@ struct Switching {
|
||||||
return {still, moving};
|
return {still, moving};
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add "mode chain" to HybridNonlinearFactorGraph
|
/**
|
||||||
|
* @brief Add "mode chain" to HybridNonlinearFactorGraph from M(0) to M(K-2).
|
||||||
|
* E.g. if K=4, we want M0, M1 and M2.
|
||||||
|
*
|
||||||
|
* @param fg The nonlinear factor graph to which the mode chain is added.
|
||||||
|
*/
|
||||||
void addModeChain(HybridNonlinearFactorGraph *fg) {
|
void addModeChain(HybridNonlinearFactorGraph *fg) {
|
||||||
auto prior = boost::make_shared<DiscreteDistribution>(modes[1], "1/1");
|
auto prior = boost::make_shared<DiscreteDistribution>(modes[0], "1/1");
|
||||||
fg->push_discrete(prior);
|
fg->push_discrete(prior);
|
||||||
for (size_t k = 1; k < K - 1; k++) {
|
for (size_t k = 0; k < K - 2; k++) {
|
||||||
auto parents = {modes[k]};
|
auto parents = {modes[k]};
|
||||||
auto conditional = boost::make_shared<DiscreteConditional>(
|
auto conditional = boost::make_shared<DiscreteConditional>(
|
||||||
modes[k + 1], parents, "1/2 3/2");
|
modes[k + 1], parents, "1/2 3/2");
|
||||||
|
@@ -207,11 +212,16 @@ struct Switching {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
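A minimal standalone sketch of the discrete chain these helpers create, reusing the same DiscreteDistribution and DiscreteConditional calls (and the M(.) shorthand) used above, shown for K = 3 where the chain is just P(M0) P(M1 | M0):

void modeChainSketch() {
  // Same construction as addModeChain, written out for two modes.
  DiscreteKey m0(M(0), 2), m1(M(1), 2);
  DiscreteDistribution prior(m0, "1/1");  // uniform prior on the first mode
  auto parents = {m0};
  // Odds 1:2 if M(0) = 0, 3:2 if M(0) = 1.
  DiscreteConditional transition(m1, parents, "1/2 3/2");

  // Evaluate the conditional for one assignment.
  DiscreteValues modes;
  modes[M(0)] = 0;
  modes[M(1)] = 1;
  double p = transition(modes);  // 2/3 under the usual signature convention
}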
// Add "mode chain" to HybridGaussianFactorGraph
|
/**
|
||||||
|
* @brief Add "mode chain" to HybridGaussianFactorGraph from M(0) to M(K-2).
|
||||||
|
* E.g. if K=4, we want M0, M1 and M2.
|
||||||
|
*
|
||||||
|
* @param fg The gaussian factor graph to which the mode chain is added.
|
||||||
|
*/
|
||||||
void addModeChain(HybridGaussianFactorGraph *fg) {
|
void addModeChain(HybridGaussianFactorGraph *fg) {
|
||||||
auto prior = boost::make_shared<DiscreteDistribution>(modes[1], "1/1");
|
auto prior = boost::make_shared<DiscreteDistribution>(modes[0], "1/1");
|
||||||
fg->push_discrete(prior);
|
fg->push_discrete(prior);
|
||||||
for (size_t k = 1; k < K - 1; k++) {
|
for (size_t k = 0; k < K - 2; k++) {
|
||||||
auto parents = {modes[k]};
|
auto parents = {modes[k]};
|
||||||
auto conditional = boost::make_shared<DiscreteConditional>(
|
auto conditional = boost::make_shared<DiscreteConditional>(
|
||||||
modes[k + 1], parents, "1/2 3/2");
|
modes[k + 1], parents, "1/2 3/2");
|
||||||
|
|
|
@@ -82,9 +82,9 @@ TEST(HybridBayesNet, Choose) {
|
||||||
s.linearizedFactorGraph.eliminatePartialSequential(ordering);
|
s.linearizedFactorGraph.eliminatePartialSequential(ordering);
|
||||||
|
|
||||||
DiscreteValues assignment;
|
DiscreteValues assignment;
|
||||||
|
assignment[M(0)] = 1;
|
||||||
assignment[M(1)] = 1;
|
assignment[M(1)] = 1;
|
||||||
assignment[M(2)] = 1;
|
assignment[M(2)] = 0;
|
||||||
assignment[M(3)] = 0;
|
|
||||||
|
|
||||||
GaussianBayesNet gbn = hybridBayesNet->choose(assignment);
|
GaussianBayesNet gbn = hybridBayesNet->choose(assignment);
|
||||||
|
|
||||||
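As a reading aid, a short sketch of what the assignment above does, reusing only calls that appear in this test plus GaussianBayesNet::optimize(): fixing the (now 0-indexed) modes M(0), M(1), M(2) selects a single Gaussian branch of the hybrid Bayes net, which can then be solved like any ordinary GaussianBayesNet.

// Sketch: a fixed discrete assignment picks one Gaussian branch.
DiscreteValues assignment;
assignment[M(0)] = 1;
assignment[M(1)] = 1;
assignment[M(2)] = 0;
GaussianBayesNet gbn = hybridBayesNet->choose(assignment);
VectorValues delta = gbn.optimize();  // continuous solution for that branch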
|
@@ -120,20 +120,20 @@ TEST(HybridBayesNet, OptimizeAssignment) {
|
||||||
s.linearizedFactorGraph.eliminatePartialSequential(ordering);
|
s.linearizedFactorGraph.eliminatePartialSequential(ordering);
|
||||||
|
|
||||||
DiscreteValues assignment;
|
DiscreteValues assignment;
|
||||||
|
assignment[M(0)] = 1;
|
||||||
assignment[M(1)] = 1;
|
assignment[M(1)] = 1;
|
||||||
assignment[M(2)] = 1;
|
assignment[M(2)] = 1;
|
||||||
assignment[M(3)] = 1;
|
|
||||||
|
|
||||||
VectorValues delta = hybridBayesNet->optimize(assignment);
|
VectorValues delta = hybridBayesNet->optimize(assignment);
|
||||||
|
|
||||||
// The linearization point has the same value as the key index,
|
// The linearization point is one more than the key index,
|
||||||
// e.g. X(1) = 1, X(2) = 2,
|
// e.g. X(0) = 1, X(1) = 2,
|
||||||
// but the factors specify X(k) = k-1, so delta should be -1.
|
// but the factors specify X(k) = k, so delta should be -1.
|
||||||
VectorValues expected_delta;
|
VectorValues expected_delta;
|
||||||
|
expected_delta.insert(make_pair(X(0), -Vector1::Ones()));
|
||||||
expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
|
expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
|
||||||
expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
|
expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
|
||||||
expected_delta.insert(make_pair(X(3), -Vector1::Ones()));
|
expected_delta.insert(make_pair(X(3), -Vector1::Ones()));
|
||||||
expected_delta.insert(make_pair(X(4), -Vector1::Ones()));
|
|
||||||
|
|
||||||
EXPECT(assert_equal(expected_delta, delta));
|
EXPECT(assert_equal(expected_delta, delta));
|
||||||
}
|
}
|
||||||
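The expected delta above follows from a one-line calculation with the re-indexed fixture; a worked check:

// Worked check: why every entry of expected_delta is -Vector1::Ones().
// Linearization point (Switching constructor):  X(k) = k + 1
// Prior/measurement factors pin                 X(k) = k
// so the linear update is
//   delta_k = k - (k + 1) = -1   for every k = 0 ... K-1.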
|
@@ -150,16 +150,16 @@ TEST(HybridBayesNet, Optimize) {
|
||||||
HybridValues delta = hybridBayesNet->optimize();
|
HybridValues delta = hybridBayesNet->optimize();
|
||||||
|
|
||||||
DiscreteValues expectedAssignment;
|
DiscreteValues expectedAssignment;
|
||||||
expectedAssignment[M(1)] = 1;
|
expectedAssignment[M(0)] = 1;
|
||||||
expectedAssignment[M(2)] = 0;
|
expectedAssignment[M(1)] = 0;
|
||||||
expectedAssignment[M(3)] = 1;
|
expectedAssignment[M(2)] = 1;
|
||||||
EXPECT(assert_equal(expectedAssignment, delta.discrete()));
|
EXPECT(assert_equal(expectedAssignment, delta.discrete()));
|
||||||
|
|
||||||
VectorValues expectedValues;
|
VectorValues expectedValues;
|
||||||
expectedValues.insert(X(1), -0.999904 * Vector1::Ones());
|
expectedValues.insert(X(0), -0.999904 * Vector1::Ones());
|
||||||
expectedValues.insert(X(2), -0.99029 * Vector1::Ones());
|
expectedValues.insert(X(1), -0.99029 * Vector1::Ones());
|
||||||
expectedValues.insert(X(3), -1.00971 * Vector1::Ones());
|
expectedValues.insert(X(2), -1.00971 * Vector1::Ones());
|
||||||
expectedValues.insert(X(4), -1.0001 * Vector1::Ones());
|
expectedValues.insert(X(3), -1.0001 * Vector1::Ones());
|
||||||
|
|
||||||
EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
|
EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
|
||||||
}
|
}
|
||||||
|
@@ -175,10 +175,10 @@ TEST(HybridBayesNet, OptimizeMultifrontal) {
|
||||||
HybridValues delta = hybridBayesTree->optimize();
|
HybridValues delta = hybridBayesTree->optimize();
|
||||||
|
|
||||||
VectorValues expectedValues;
|
VectorValues expectedValues;
|
||||||
expectedValues.insert(X(1), -0.999904 * Vector1::Ones());
|
expectedValues.insert(X(0), -0.999904 * Vector1::Ones());
|
||||||
expectedValues.insert(X(2), -0.99029 * Vector1::Ones());
|
expectedValues.insert(X(1), -0.99029 * Vector1::Ones());
|
||||||
expectedValues.insert(X(3), -1.00971 * Vector1::Ones());
|
expectedValues.insert(X(2), -1.00971 * Vector1::Ones());
|
||||||
expectedValues.insert(X(4), -1.0001 * Vector1::Ones());
|
expectedValues.insert(X(3), -1.0001 * Vector1::Ones());
|
||||||
|
|
||||||
EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
|
EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
|
||||||
}
|
}
|
||||||
|
|
|
@@ -60,9 +60,9 @@ TEST(HybridBayesTree, OptimizeAssignment) {
|
||||||
isam.update(graph1);
|
isam.update(graph1);
|
||||||
|
|
||||||
DiscreteValues assignment;
|
DiscreteValues assignment;
|
||||||
|
assignment[M(0)] = 1;
|
||||||
assignment[M(1)] = 1;
|
assignment[M(1)] = 1;
|
||||||
assignment[M(2)] = 1;
|
assignment[M(2)] = 1;
|
||||||
assignment[M(3)] = 1;
|
|
||||||
|
|
||||||
VectorValues delta = isam.optimize(assignment);
|
VectorValues delta = isam.optimize(assignment);
|
||||||
|
|
||||||
|
@@ -70,16 +70,16 @@ TEST(HybridBayesTree, OptimizeAssignment) {
|
||||||
// e.g. X(1) = 1, X(2) = 2,
|
// e.g. X(0) = 1, X(1) = 2,
|
||||||
// but the factors specify X(k) = k-1, so delta should be -1.
|
// but the factors specify X(k) = k, so delta should be -1.
|
||||||
VectorValues expected_delta;
|
VectorValues expected_delta;
|
||||||
|
expected_delta.insert(make_pair(X(0), -Vector1::Ones()));
|
||||||
expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
|
expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
|
||||||
expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
|
expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
|
||||||
expected_delta.insert(make_pair(X(3), -Vector1::Ones()));
|
expected_delta.insert(make_pair(X(3), -Vector1::Ones()));
|
||||||
expected_delta.insert(make_pair(X(4), -Vector1::Ones()));
|
|
||||||
|
|
||||||
EXPECT(assert_equal(expected_delta, delta));
|
EXPECT(assert_equal(expected_delta, delta));
|
||||||
|
|
||||||
// Create ordering.
|
// Create ordering.
|
||||||
Ordering ordering;
|
Ordering ordering;
|
||||||
for (size_t k = 1; k <= s.K; k++) ordering += X(k);
|
for (size_t k = 0; k < s.K; k++) ordering += X(k);
|
||||||
|
|
||||||
HybridBayesNet::shared_ptr hybridBayesNet;
|
HybridBayesNet::shared_ptr hybridBayesNet;
|
||||||
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
|
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
|
||||||
|
@@ -123,7 +123,7 @@ TEST(HybridBayesTree, Optimize) {
|
||||||
|
|
||||||
// Create ordering.
|
// Create ordering.
|
||||||
Ordering ordering;
|
Ordering ordering;
|
||||||
for (size_t k = 1; k <= s.K; k++) ordering += X(k);
|
for (size_t k = 0; k < s.K; k++) ordering += X(k);
|
||||||
|
|
||||||
HybridBayesNet::shared_ptr hybridBayesNet;
|
HybridBayesNet::shared_ptr hybridBayesNet;
|
||||||
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
|
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
|
||||||
|
|
|
@@ -82,9 +82,9 @@ TEST(HybridNonlinearISAM, Incremental) {
|
||||||
HybridNonlinearFactorGraph graph;
|
HybridNonlinearFactorGraph graph;
|
||||||
Values initial;
|
Values initial;
|
||||||
|
|
||||||
// Add the X(1) prior
|
// Add the X(0) prior
|
||||||
graph.push_back(switching.nonlinearFactorGraph.at(0));
|
graph.push_back(switching.nonlinearFactorGraph.at(0));
|
||||||
initial.insert(X(1), switching.linearizationPoint.at<double>(X(1)));
|
initial.insert(X(0), switching.linearizationPoint.at<double>(X(0)));
|
||||||
|
|
||||||
HybridGaussianFactorGraph linearized;
|
HybridGaussianFactorGraph linearized;
|
||||||
HybridGaussianFactorGraph bayesNet;
|
HybridGaussianFactorGraph bayesNet;
|
||||||
|
@@ -96,7 +96,7 @@ TEST(HybridNonlinearISAM, Incremental) {
|
||||||
// Measurement
|
// Measurement
|
||||||
graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1));
|
graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1));
|
||||||
|
|
||||||
initial.insert(X(k + 1), switching.linearizationPoint.at<double>(X(k + 1)));
|
initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
|
||||||
|
|
||||||
bayesNet = smoother.hybridBayesNet();
|
bayesNet = smoother.hybridBayesNet();
|
||||||
linearized = *graph.linearize(initial);
|
linearized = *graph.linearize(initial);
|
||||||
|
@@ -111,13 +111,13 @@ TEST(HybridNonlinearISAM, Incremental) {
|
||||||
|
|
||||||
DiscreteValues expected_discrete;
|
DiscreteValues expected_discrete;
|
||||||
for (size_t k = 0; k < K - 1; k++) {
|
for (size_t k = 0; k < K - 1; k++) {
|
||||||
expected_discrete[M(k + 1)] = discrete_seq[k];
|
expected_discrete[M(k)] = discrete_seq[k];
|
||||||
}
|
}
|
||||||
EXPECT(assert_equal(expected_discrete, delta.discrete()));
|
EXPECT(assert_equal(expected_discrete, delta.discrete()));
|
||||||
|
|
||||||
Values expected_continuous;
|
Values expected_continuous;
|
||||||
for (size_t k = 0; k < K; k++) {
|
for (size_t k = 0; k < K; k++) {
|
||||||
expected_continuous.insert(X(k + 1), measurements[k]);
|
expected_continuous.insert(X(k), measurements[k]);
|
||||||
}
|
}
|
||||||
EXPECT(assert_equal(expected_continuous, result));
|
EXPECT(assert_equal(expected_continuous, result));
|
||||||
}
|
}
|
||||||
|
|
|
@@ -531,34 +531,34 @@ TEST(HybridGaussianFactorGraph, Conditionals) {
|
||||||
|
|
||||||
hfg.push_back(switching.linearizedFactorGraph.at(0)); // P(X1)
|
hfg.push_back(switching.linearizedFactorGraph.at(0));  // P(X0)
|
||||||
Ordering ordering;
|
Ordering ordering;
|
||||||
ordering.push_back(X(1));
|
ordering.push_back(X(0));
|
||||||
HybridBayesNet::shared_ptr bayes_net = hfg.eliminateSequential(ordering);
|
HybridBayesNet::shared_ptr bayes_net = hfg.eliminateSequential(ordering);
|
||||||
|
|
||||||
hfg.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1)
|
hfg.push_back(switching.linearizedFactorGraph.at(1));  // P(X0, X1 | M0)
|
||||||
hfg.push_back(*bayes_net);
|
hfg.push_back(*bayes_net);
|
||||||
hfg.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2)
|
hfg.push_back(switching.linearizedFactorGraph.at(2));  // P(X1, X2 | M1)
|
||||||
hfg.push_back(switching.linearizedFactorGraph.at(5)); // P(M1)
|
hfg.push_back(switching.linearizedFactorGraph.at(5));  // P(M0)
|
||||||
|
ordering.push_back(X(1));
|
||||||
ordering.push_back(X(2));
|
ordering.push_back(X(2));
|
||||||
ordering.push_back(X(3));
|
ordering.push_back(M(0));
|
||||||
ordering.push_back(M(1));
|
ordering.push_back(M(1));
|
||||||
ordering.push_back(M(2));
|
|
||||||
|
|
||||||
bayes_net = hfg.eliminateSequential(ordering);
|
bayes_net = hfg.eliminateSequential(ordering);
|
||||||
|
|
||||||
HybridValues result = bayes_net->optimize();
|
HybridValues result = bayes_net->optimize();
|
||||||
|
|
||||||
Values expected_continuous;
|
Values expected_continuous;
|
||||||
expected_continuous.insert<double>(X(1), 0);
|
expected_continuous.insert<double>(X(0), 0);
|
||||||
expected_continuous.insert<double>(X(2), 1);
|
expected_continuous.insert<double>(X(1), 1);
|
||||||
expected_continuous.insert<double>(X(3), 2);
|
expected_continuous.insert<double>(X(2), 2);
|
||||||
expected_continuous.insert<double>(X(4), 4);
|
expected_continuous.insert<double>(X(3), 4);
|
||||||
Values result_continuous =
|
Values result_continuous =
|
||||||
switching.linearizationPoint.retract(result.continuous());
|
switching.linearizationPoint.retract(result.continuous());
|
||||||
EXPECT(assert_equal(expected_continuous, result_continuous));
|
EXPECT(assert_equal(expected_continuous, result_continuous));
|
||||||
|
|
||||||
DiscreteValues expected_discrete;
|
DiscreteValues expected_discrete;
|
||||||
|
expected_discrete[M(0)] = 1;
|
||||||
expected_discrete[M(1)] = 1;
|
expected_discrete[M(1)] = 1;
|
||||||
expected_discrete[M(2)] = 1;
|
|
||||||
EXPECT(assert_equal(expected_discrete, result.discrete()));
|
EXPECT(assert_equal(expected_discrete, result.discrete()));
|
||||||
}
|
}
|
||||||
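A reading aid for the continuous expectations in this test: for double-valued keys, retract is plain addition, so the expected values are just the linearization point plus the optimized delta (a sketch of the arithmetic, using the numbers set up above):

// result_continuous[X(k)] = linearizationPoint[X(k)] + delta[X(k)]
// e.g. X(0): 1 + (-1) = 0,  X(1): 2 + (-1) = 1,  X(2): 3 + (-1) = 2,
// matching expected_continuous above.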
|
|
||||||
|
|
|
@@ -54,40 +54,40 @@ TEST(HybridGaussianElimination, IncrementalElimination) {
|
||||||
// Create initial factor graph
|
// Create initial factor graph
|
||||||
// * * *
|
// * * *
|
||||||
// | | |
|
// | | |
|
||||||
// X1 -*- X2 -*- X3
|
// X0 -*- X1 -*- X2
|
||||||
// \*-M1-*/
|
// \*-M0-*/
|
||||||
graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X1)
|
graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X0)
|
||||||
graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1)
|
graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X0, X1 | M0)
|
||||||
graph1.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2)
|
graph1.push_back(switching.linearizedFactorGraph.at(2)); // P(X1, X2 | M1)
|
||||||
graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M1)
|
graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M0)
|
||||||
|
|
||||||
// Run update step
|
// Run update step
|
||||||
isam.update(graph1);
|
isam.update(graph1);
|
||||||
|
|
||||||
// Check that after update we have 3 hybrid Bayes net nodes:
|
// Check that after update we have 3 hybrid Bayes net nodes:
|
||||||
// P(X1 | X2, M1) and P(X2, X3 | M1, M2), P(M1, M2)
|
// P(X0 | X1, M0) and P(X1, X2 | M0, M1), P(M0, M1)
|
||||||
EXPECT_LONGS_EQUAL(3, isam.size());
|
EXPECT_LONGS_EQUAL(3, isam.size());
|
||||||
EXPECT(isam[X(1)]->conditional()->frontals() == KeyVector{X(1)});
|
EXPECT(isam[X(0)]->conditional()->frontals() == KeyVector{X(0)});
|
||||||
EXPECT(isam[X(1)]->conditional()->parents() == KeyVector({X(2), M(1)}));
|
EXPECT(isam[X(0)]->conditional()->parents() == KeyVector({X(1), M(0)}));
|
||||||
EXPECT(isam[X(2)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
|
EXPECT(isam[X(1)]->conditional()->frontals() == KeyVector({X(1), X(2)}));
|
||||||
EXPECT(isam[X(2)]->conditional()->parents() == KeyVector({M(1), M(2)}));
|
EXPECT(isam[X(1)]->conditional()->parents() == KeyVector({M(0), M(1)}));
|
||||||
|
|
||||||
/********************************************************/
|
/********************************************************/
|
||||||
// New factor graph for incremental update.
|
// New factor graph for incremental update.
|
||||||
HybridGaussianFactorGraph graph2;
|
HybridGaussianFactorGraph graph2;
|
||||||
|
|
||||||
graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X2)
|
graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X1)
|
||||||
graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X3)
|
graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X2)
|
||||||
graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M1, M2)
|
graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M0, M1)
|
||||||
|
|
||||||
isam.update(graph2);
|
isam.update(graph2);
|
||||||
|
|
||||||
// Check that after the second update we have
|
// Check that after the second update we have
|
||||||
// 1 additional hybrid Bayes net node:
|
// 1 additional hybrid Bayes net node:
|
||||||
// P(X2, X3 | M1, M2)
|
// P(X1, X2 | M0, M1)
|
||||||
EXPECT_LONGS_EQUAL(3, isam.size());
|
EXPECT_LONGS_EQUAL(3, isam.size());
|
||||||
EXPECT(isam[X(3)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
|
EXPECT(isam[X(2)]->conditional()->frontals() == KeyVector({X(1), X(2)}));
|
||||||
EXPECT(isam[X(3)]->conditional()->parents() == KeyVector({M(1), M(2)}));
|
EXPECT(isam[X(2)]->conditional()->parents() == KeyVector({M(0), M(1)}));
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ****************************************************************************/
|
/* ****************************************************************************/
|
||||||
|
@@ -100,104 +100,104 @@ TEST(HybridGaussianElimination, IncrementalInference) {
|
||||||
// Create initial factor graph
|
// Create initial factor graph
|
||||||
// * * *
|
// * * *
|
||||||
// | | |
|
// | | |
|
||||||
// X1 -*- X2 -*- X3
|
// X0 -*- X1 -*- X2
|
||||||
// | |
|
// | |
|
||||||
// *-M1 - * - M2
|
// *-M0 - * - M1
|
||||||
graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X1)
|
graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X0)
|
||||||
graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1)
|
graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X0, X1 | M0)
|
||||||
graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X2)
|
graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X1)
|
||||||
graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M1)
|
graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M0)
|
||||||
|
|
||||||
// Run update step
|
// Run update step
|
||||||
isam.update(graph1);
|
isam.update(graph1);
|
||||||
|
|
||||||
auto discreteConditional_m1 =
|
auto discreteConditional_m0 =
|
||||||
isam[M(1)]->conditional()->asDiscreteConditional();
|
isam[M(0)]->conditional()->asDiscreteConditional();
|
||||||
EXPECT(discreteConditional_m1->keys() == KeyVector({M(1)}));
|
EXPECT(discreteConditional_m0->keys() == KeyVector({M(0)}));
|
||||||
|
|
||||||
/********************************************************/
|
/********************************************************/
|
||||||
// New factor graph for incremental update.
|
// New factor graph for incremental update.
|
||||||
HybridGaussianFactorGraph graph2;
|
HybridGaussianFactorGraph graph2;
|
||||||
|
|
||||||
graph2.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2)
|
graph2.push_back(switching.linearizedFactorGraph.at(2)); // P(X1, X2 | M1)
|
||||||
graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X3)
|
graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X2)
|
||||||
graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M1, M2)
|
graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M0, M1)
|
||||||
|
|
||||||
isam.update(graph2);
|
isam.update(graph2);
|
||||||
|
|
||||||
/********************************************************/
|
/********************************************************/
|
||||||
// Run batch elimination so we can compare results.
|
// Run batch elimination so we can compare results.
|
||||||
Ordering ordering;
|
Ordering ordering;
|
||||||
|
ordering += X(0);
|
||||||
ordering += X(1);
|
ordering += X(1);
|
||||||
ordering += X(2);
|
ordering += X(2);
|
||||||
ordering += X(3);
|
|
||||||
|
|
||||||
// Now we calculate the actual factors using full elimination
|
// Now we calculate the expected factors using full elimination
|
||||||
HybridBayesTree::shared_ptr expectedHybridBayesTree;
|
HybridBayesTree::shared_ptr expectedHybridBayesTree;
|
||||||
HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph;
|
HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph;
|
||||||
std::tie(expectedHybridBayesTree, expectedRemainingGraph) =
|
std::tie(expectedHybridBayesTree, expectedRemainingGraph) =
|
||||||
switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
|
switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
|
||||||
|
|
||||||
|
// The densities on X(0) should be the same
|
||||||
|
auto x0_conditional =
|
||||||
|
dynamic_pointer_cast<GaussianMixture>(isam[X(0)]->conditional()->inner());
|
||||||
|
auto expected_x0_conditional = dynamic_pointer_cast<GaussianMixture>(
|
||||||
|
(*expectedHybridBayesTree)[X(0)]->conditional()->inner());
|
||||||
|
EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional));
|
||||||
|
|
||||||
// The densities on X(1) should be the same
|
// The densities on X(1) should be the same
|
||||||
auto x1_conditional =
|
auto x1_conditional =
|
||||||
dynamic_pointer_cast<GaussianMixture>(isam[X(1)]->conditional()->inner());
|
dynamic_pointer_cast<GaussianMixture>(isam[X(1)]->conditional()->inner());
|
||||||
auto actual_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
|
auto expected_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
|
||||||
(*expectedHybridBayesTree)[X(1)]->conditional()->inner());
|
(*expectedHybridBayesTree)[X(1)]->conditional()->inner());
|
||||||
EXPECT(assert_equal(*x1_conditional, *actual_x1_conditional));
|
EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional));
|
||||||
|
|
||||||
// The densities on X(2) should be the same
|
// The densities on X(2) should be the same
|
||||||
auto x2_conditional =
|
auto x2_conditional =
|
||||||
dynamic_pointer_cast<GaussianMixture>(isam[X(2)]->conditional()->inner());
|
dynamic_pointer_cast<GaussianMixture>(isam[X(2)]->conditional()->inner());
|
||||||
auto actual_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
|
auto expected_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
|
||||||
(*expectedHybridBayesTree)[X(2)]->conditional()->inner());
|
(*expectedHybridBayesTree)[X(2)]->conditional()->inner());
|
||||||
EXPECT(assert_equal(*x2_conditional, *actual_x2_conditional));
|
EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional));
|
||||||
|
|
||||||
// The densities on X(3) should be the same
|
|
||||||
auto x3_conditional =
|
|
||||||
dynamic_pointer_cast<GaussianMixture>(isam[X(3)]->conditional()->inner());
|
|
||||||
auto actual_x3_conditional = dynamic_pointer_cast<GaussianMixture>(
|
|
||||||
(*expectedHybridBayesTree)[X(2)]->conditional()->inner());
|
|
||||||
EXPECT(assert_equal(*x3_conditional, *actual_x3_conditional));
|
|
||||||
|
|
||||||
// We only perform manual continuous elimination for 0,0.
|
// We only perform manual continuous elimination for 0,0.
|
||||||
// The other discrete probabilities on M(2) are calculated the same way
|
// The other discrete probabilities on M(1) are calculated the same way
|
||||||
Ordering discrete_ordering;
|
Ordering discrete_ordering;
|
||||||
|
discrete_ordering += M(0);
|
||||||
discrete_ordering += M(1);
|
discrete_ordering += M(1);
|
||||||
discrete_ordering += M(2);
|
|
||||||
HybridBayesTree::shared_ptr discreteBayesTree =
|
HybridBayesTree::shared_ptr discreteBayesTree =
|
||||||
expectedRemainingGraph->eliminateMultifrontal(discrete_ordering);
|
expectedRemainingGraph->eliminateMultifrontal(discrete_ordering);
|
||||||
|
|
||||||
DiscreteValues m00;
|
DiscreteValues m00;
|
||||||
m00[M(1)] = 0, m00[M(2)] = 0;
|
m00[M(0)] = 0, m00[M(1)] = 0;
|
||||||
DiscreteConditional decisionTree =
|
DiscreteConditional decisionTree =
|
||||||
*(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional();
|
*(*discreteBayesTree)[M(1)]->conditional()->asDiscreteConditional();
|
||||||
double m00_prob = decisionTree(m00);
|
double m00_prob = decisionTree(m00);
|
||||||
|
|
||||||
auto discreteConditional = isam[M(2)]->conditional()->asDiscreteConditional();
|
auto discreteConditional = isam[M(1)]->conditional()->asDiscreteConditional();
|
||||||
|
|
||||||
// Test if the probability values are as expected with regression tests.
|
// Test if the probability values are as expected with regression tests.
|
||||||
DiscreteValues assignment;
|
DiscreteValues assignment;
|
||||||
EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5));
|
EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5));
|
||||||
|
assignment[M(0)] = 0;
|
||||||
assignment[M(1)] = 0;
|
assignment[M(1)] = 0;
|
||||||
assignment[M(2)] = 0;
|
|
||||||
EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5));
|
EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5));
|
||||||
assignment[M(1)] = 1;
|
assignment[M(0)] = 1;
|
||||||
assignment[M(2)] = 0;
|
|
||||||
EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
|
|
||||||
assignment[M(1)] = 0;
|
assignment[M(1)] = 0;
|
||||||
assignment[M(2)] = 1;
|
EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
|
||||||
EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
|
assignment[M(0)] = 0;
|
||||||
|
assignment[M(1)] = 1;
|
||||||
|
EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
|
||||||
|
assignment[M(0)] = 1;
|
||||||
assignment[M(1)] = 1;
|
assignment[M(1)] = 1;
|
||||||
assignment[M(2)] = 1;
|
|
||||||
EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5));
|
EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5));
|
||||||
|
|
||||||
// Check if the clique conditional generated from incremental elimination
|
// Check if the clique conditional generated from incremental elimination
|
||||||
// matches that of batch elimination.
|
// matches that of batch elimination.
|
||||||
auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal();
|
auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal();
|
||||||
auto expectedConditional = dynamic_pointer_cast<DecisionTreeFactor>(
|
auto expectedConditional = dynamic_pointer_cast<DecisionTreeFactor>(
|
||||||
(*expectedChordal)[M(2)]->conditional()->inner());
|
(*expectedChordal)[M(1)]->conditional()->inner());
|
||||||
auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>(
|
auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>(
|
||||||
isam[M(2)]->conditional()->inner());
|
isam[M(1)]->conditional()->inner());
|
||||||
EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6));
|
EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -208,13 +208,13 @@ TEST(HybridGaussianElimination, Approx_inference) {
|
||||||
HybridGaussianISAM incrementalHybrid;
|
HybridGaussianISAM incrementalHybrid;
|
||||||
HybridGaussianFactorGraph graph1;
|
HybridGaussianFactorGraph graph1;
|
||||||
|
|
||||||
// Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
|
// Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
|
||||||
for (size_t i = 1; i < 4; i++) {
|
for (size_t i = 1; i < 4; i++) {
|
||||||
graph1.push_back(switching.linearizedFactorGraph.at(i));
|
graph1.push_back(switching.linearizedFactorGraph.at(i));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add the Gaussian factors, 1 prior on X(1),
|
// Add the Gaussian factors, 1 prior on X(0),
|
||||||
// 3 measurements on X(2), X(3), X(4)
|
// 3 measurements on X(1), X(2), X(3)
|
||||||
graph1.push_back(switching.linearizedFactorGraph.at(0));
|
graph1.push_back(switching.linearizedFactorGraph.at(0));
|
||||||
for (size_t i = 4; i <= 7; i++) {
|
for (size_t i = 4; i <= 7; i++) {
|
||||||
graph1.push_back(switching.linearizedFactorGraph.at(i));
|
graph1.push_back(switching.linearizedFactorGraph.at(i));
|
||||||
|
@@ -222,7 +222,7 @@ TEST(HybridGaussianElimination, Approx_inference) {
|
||||||
|
|
||||||
// Create ordering.
|
// Create ordering.
|
||||||
Ordering ordering;
|
Ordering ordering;
|
||||||
for (size_t j = 1; j <= 4; j++) {
|
for (size_t j = 0; j < 4; j++) {
|
||||||
ordering += X(j);
|
ordering += X(j);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -271,26 +271,26 @@ TEST(HybridGaussianElimination, Approx_inference) {
|
||||||
1 1 1 Leaf 0.5
|
1 1 1 Leaf 0.5
|
||||||
*/
|
*/
|
||||||
|
|
||||||
auto discreteConditional_m1 = *dynamic_pointer_cast<DiscreteConditional>(
|
auto discreteConditional_m0 = *dynamic_pointer_cast<DiscreteConditional>(
|
||||||
incrementalHybrid[M(1)]->conditional()->inner());
|
incrementalHybrid[M(0)]->conditional()->inner());
|
||||||
EXPECT(discreteConditional_m1.keys() == KeyVector({M(1), M(2), M(3)}));
|
EXPECT(discreteConditional_m0.keys() == KeyVector({M(0), M(1), M(2)}));
|
||||||
|
|
||||||
// Get the number of elements which are greater than 0.
|
// Get the number of elements which are greater than 0.
|
||||||
auto count = [](const double &value, int count) {
|
auto count = [](const double &value, int count) {
|
||||||
return value > 0 ? count + 1 : count;
|
return value > 0 ? count + 1 : count;
|
||||||
};
|
};
|
||||||
// Check that the number of leaves after pruning is 5.
|
// Check that the number of leaves after pruning is 5.
|
||||||
EXPECT_LONGS_EQUAL(5, discreteConditional_m1.fold(count, 0));
|
EXPECT_LONGS_EQUAL(5, discreteConditional_m0.fold(count, 0));
|
||||||
|
|
||||||
// Check that the hybrid nodes of the bayes net match those of the pre-pruning
|
// Check that the hybrid nodes of the bayes net match those of the pre-pruning
|
||||||
// bayes net, at the same positions.
|
// bayes net, at the same positions.
|
||||||
auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>(
|
auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>(
|
||||||
unprunedHybridBayesTree->clique(X(4))->conditional()->inner());
|
unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
|
||||||
auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>(
|
auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>(
|
||||||
incrementalHybrid[X(4)]->conditional()->inner());
|
incrementalHybrid[X(3)]->conditional()->inner());
|
||||||
|
|
||||||
std::vector<std::pair<DiscreteValues, double>> assignments =
|
std::vector<std::pair<DiscreteValues, double>> assignments =
|
||||||
discreteConditional_m1.enumerate();
|
discreteConditional_m0.enumerate();
|
||||||
// Loop over all assignments and check the pruned components
|
// Loop over all assignments and check the pruned components
|
||||||
for (auto &&av : assignments) {
|
for (auto &&av : assignments) {
|
||||||
const DiscreteValues &assignment = av.first;
|
const DiscreteValues &assignment = av.first;
|
||||||
|
@@ -314,13 +314,13 @@ TEST(HybridGaussianElimination, Incremental_approximate) {
|
||||||
HybridGaussianFactorGraph graph1;
|
HybridGaussianFactorGraph graph1;
|
||||||
|
|
||||||
/***** Run Round 1 *****/
|
/***** Run Round 1 *****/
|
||||||
// Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
|
// Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
|
||||||
for (size_t i = 1; i < 4; i++) {
|
for (size_t i = 1; i < 4; i++) {
|
||||||
graph1.push_back(switching.linearizedFactorGraph.at(i));
|
graph1.push_back(switching.linearizedFactorGraph.at(i));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add the Gaussian factors, 1 prior on X(1),
|
// Add the Gaussian factors, 1 prior on X(0),
|
||||||
// 3 measurements on X(2), X(3), X(4)
|
// 3 measurements on X(1), X(2), X(3)
|
||||||
graph1.push_back(switching.linearizedFactorGraph.at(0));
|
graph1.push_back(switching.linearizedFactorGraph.at(0));
|
||||||
for (size_t i = 5; i <= 7; i++) {
|
for (size_t i = 5; i <= 7; i++) {
|
||||||
graph1.push_back(switching.linearizedFactorGraph.at(i));
|
graph1.push_back(switching.linearizedFactorGraph.at(i));
|
||||||
|
@@ -335,13 +335,13 @@ TEST(HybridGaussianElimination, Incremental_approximate) {
|
||||||
// each with 2, 4, 8, and 5 (pruned) leaves respetively.
|
// each with 2, 4, 8, and 5 (pruned) leaves respectively.
|
||||||
EXPECT_LONGS_EQUAL(4, incrementalHybrid.size());
|
EXPECT_LONGS_EQUAL(4, incrementalHybrid.size());
|
||||||
EXPECT_LONGS_EQUAL(
|
EXPECT_LONGS_EQUAL(
|
||||||
2, incrementalHybrid[X(1)]->conditional()->asMixture()->nrComponents());
|
2, incrementalHybrid[X(0)]->conditional()->asMixture()->nrComponents());
|
||||||
EXPECT_LONGS_EQUAL(
|
EXPECT_LONGS_EQUAL(
|
||||||
3, incrementalHybrid[X(2)]->conditional()->asMixture()->nrComponents());
|
3, incrementalHybrid[X(1)]->conditional()->asMixture()->nrComponents());
|
||||||
|
EXPECT_LONGS_EQUAL(
|
||||||
|
5, incrementalHybrid[X(2)]->conditional()->asMixture()->nrComponents());
|
||||||
EXPECT_LONGS_EQUAL(
|
EXPECT_LONGS_EQUAL(
|
||||||
5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents());
|
5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents());
|
||||||
EXPECT_LONGS_EQUAL(
|
|
||||||
5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents());
|
|
||||||
|
|
||||||
/***** Run Round 2 *****/
|
/***** Run Round 2 *****/
|
||||||
HybridGaussianFactorGraph graph2;
|
HybridGaussianFactorGraph graph2;
|
||||||
|
@@ -356,9 +356,9 @@ TEST(HybridGaussianElimination, Incremental_approximate) {
|
||||||
// with 5 (pruned) leaves.
|
// with 5 (pruned) leaves.
|
||||||
CHECK_EQUAL(5, incrementalHybrid.size());
|
CHECK_EQUAL(5, incrementalHybrid.size());
|
||||||
EXPECT_LONGS_EQUAL(
|
EXPECT_LONGS_EQUAL(
|
||||||
5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents());
|
5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents());
|
||||||
EXPECT_LONGS_EQUAL(
|
EXPECT_LONGS_EQUAL(
|
||||||
5, incrementalHybrid[X(5)]->conditional()->asMixture()->nrComponents());
|
5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents());
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************/
|
/* ************************************************************************/
|
||||||
|
@@ -370,7 +370,7 @@ TEST(HybridGaussianISAM, NonTrivial) {
|
||||||
/*************** Run Round 1 ***************/
|
/*************** Run Round 1 ***************/
|
||||||
HybridNonlinearFactorGraph fg;
|
HybridNonlinearFactorGraph fg;
|
||||||
|
|
||||||
// Add a prior on pose x1 at the origin.
|
// Add a prior on pose x0 at the origin.
|
||||||
// A prior factor consists of a mean and
|
// A prior factor consists of a mean and
|
||||||
// a noise model (covariance matrix)
|
// a noise model (covariance matrix)
|
||||||
Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin
|
Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin
|
||||||
|
|
|
@@ -263,7 +263,7 @@ TEST(HybridFactorGraph, EliminationTree) {
|
||||||
|
|
||||||
// Create ordering.
|
// Create ordering.
|
||||||
Ordering ordering;
|
Ordering ordering;
|
||||||
for (size_t k = 1; k <= self.K; k++) ordering += X(k);
|
for (size_t k = 0; k < self.K; k++) ordering += X(k);
|
||||||
|
|
||||||
// Create elimination tree.
|
// Create elimination tree.
|
||||||
HybridEliminationTree etree(self.linearizedFactorGraph, ordering);
|
HybridEliminationTree etree(self.linearizedFactorGraph, ordering);
|
||||||
|
@@ -271,9 +271,9 @@ TEST(HybridFactorGraph, EliminationTree) {
|
||||||
}
|
}
|
||||||
|
|
||||||
/****************************************************************************
|
/****************************************************************************
|
||||||
*Test elimination function by eliminating x1 in *-x1-*-x2 graph.
|
*Test elimination function by eliminating x0 in *-x0-*-x1 graph.
|
||||||
*/
|
*/
|
||||||
TEST(GaussianElimination, Eliminate_x1) {
|
TEST(GaussianElimination, Eliminate_x0) {
|
||||||
Switching self(3);
|
Switching self(3);
|
||||||
|
|
||||||
// Gather factors on x1, has a simple Gaussian and a mixture factor.
|
// Gather factors on x0, which has a simple Gaussian and a mixture factor.
|
||||||
|
@@ -283,9 +283,9 @@ TEST(GaussianElimination, Eliminate_x1) {
|
||||||
// Add first hybrid factor
|
// Add first hybrid factor
|
||||||
factors.push_back(self.linearizedFactorGraph[1]);
|
factors.push_back(self.linearizedFactorGraph[1]);
|
||||||
|
|
||||||
// Eliminate x1
|
// Eliminate x0
|
||||||
Ordering ordering;
|
Ordering ordering;
|
||||||
ordering += X(1);
|
ordering += X(0);
|
||||||
|
|
||||||
auto result = EliminateHybrid(factors, ordering);
|
auto result = EliminateHybrid(factors, ordering);
|
||||||
CHECK(result.first);
|
CHECK(result.first);
|
||||||
|
@@ -296,20 +296,20 @@ TEST(GaussianElimination, Eliminate_x1) {
|
||||||
}
|
}
|
||||||
|
|
||||||
/****************************************************************************
|
/****************************************************************************
|
||||||
* Test elimination function by eliminating x2 in x1-*-x2-*-x3 chain.
|
* Test elimination function by eliminating x1 in x0-*-x1-*-x2 chain.
|
||||||
* m1/ \m2
|
* m0/ \m1
|
||||||
*/
|
*/
|
||||||
TEST(HybridsGaussianElimination, Eliminate_x2) {
|
TEST(HybridsGaussianElimination, Eliminate_x1) {
|
||||||
Switching self(3);
|
Switching self(3);
|
||||||
|
|
||||||
// Gather factors on x2, will be two mixture factors (with x1 and x3, resp.).
|
// Gather factors on x1, will be two mixture factors (with x0 and x2, resp.).
|
||||||
HybridGaussianFactorGraph factors;
|
HybridGaussianFactorGraph factors;
|
||||||
factors.push_back(self.linearizedFactorGraph[1]); // involves m1
|
factors.push_back(self.linearizedFactorGraph[1]); // involves m0
|
||||||
factors.push_back(self.linearizedFactorGraph[2]); // involves m2
|
factors.push_back(self.linearizedFactorGraph[2]); // involves m1
|
||||||
|
|
||||||
// Eliminate x2
|
// Eliminate x1
|
||||||
Ordering ordering;
|
Ordering ordering;
|
||||||
ordering += X(2);
|
ordering += X(1);
|
||||||
|
|
||||||
std::pair<HybridConditional::shared_ptr, HybridFactor::shared_ptr> result =
|
std::pair<HybridConditional::shared_ptr, HybridFactor::shared_ptr> result =
|
||||||
EliminateHybrid(factors, ordering);
|
EliminateHybrid(factors, ordering);
|
||||||
|
@@ -326,28 +326,28 @@ TEST(HybridsGaussianElimination, Eliminate_x2) {
|
||||||
GaussianFactorGraph::shared_ptr batchGFG(double between,
|
GaussianFactorGraph::shared_ptr batchGFG(double between,
|
||||||
Values linearizationPoint) {
|
Values linearizationPoint) {
|
||||||
NonlinearFactorGraph graph;
|
NonlinearFactorGraph graph;
|
||||||
graph.addPrior<double>(X(1), 0, Isotropic::Sigma(1, 0.1));
|
graph.addPrior<double>(X(0), 0, Isotropic::Sigma(1, 0.1));
|
||||||
|
|
||||||
auto between_x1_x2 = boost::make_shared<MotionModel>(
|
auto between_x0_x1 = boost::make_shared<MotionModel>(
|
||||||
X(1), X(2), between, Isotropic::Sigma(1, 1.0));
|
X(0), X(1), between, Isotropic::Sigma(1, 1.0));
|
||||||
|
|
||||||
graph.push_back(between_x1_x2);
|
graph.push_back(between_x0_x1);
|
||||||
|
|
||||||
return graph.linearize(linearizationPoint);
|
return graph.linearize(linearizationPoint);
|
||||||
}
|
}
|
||||||
|
|
||||||
/****************************************************************************
|
/****************************************************************************
|
||||||
* Test elimination function by eliminating x1 and x2 in graph.
|
* Test elimination function by eliminating x0 and x1 in graph.
|
||||||
*/
|
*/
|
||||||
TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
|
TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
|
||||||
Switching self(2, 1.0, 0.1);
|
Switching self(2, 1.0, 0.1);
|
||||||
|
|
||||||
auto factors = self.linearizedFactorGraph;
|
auto factors = self.linearizedFactorGraph;
|
||||||
|
|
||||||
// Eliminate x1
|
// Eliminate x0
|
||||||
Ordering ordering;
|
Ordering ordering;
|
||||||
|
ordering += X(0);
|
||||||
ordering += X(1);
|
ordering += X(1);
|
||||||
ordering += X(2);
|
|
||||||
|
|
||||||
HybridConditional::shared_ptr hybridConditionalMixture;
|
HybridConditional::shared_ptr hybridConditionalMixture;
|
||||||
HybridFactor::shared_ptr factorOnModes;
|
HybridFactor::shared_ptr factorOnModes;
|
||||||
|
@@ -359,7 +359,7 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
|
||||||
dynamic_pointer_cast<GaussianMixture>(hybridConditionalMixture->inner());
|
dynamic_pointer_cast<GaussianMixture>(hybridConditionalMixture->inner());
|
||||||
|
|
||||||
CHECK(gaussianConditionalMixture);
|
CHECK(gaussianConditionalMixture);
|
||||||
// Frontals = [x1, x2]
|
// Frontals = [x0, x1]
|
||||||
EXPECT_LONGS_EQUAL(2, gaussianConditionalMixture->nrFrontals());
|
EXPECT_LONGS_EQUAL(2, gaussianConditionalMixture->nrFrontals());
|
||||||
// 1 parent, which is the mode
|
// 1 parent, which is the mode
|
||||||
EXPECT_LONGS_EQUAL(1, gaussianConditionalMixture->nrParents());
|
EXPECT_LONGS_EQUAL(1, gaussianConditionalMixture->nrParents());
|
||||||
|
@@ -387,7 +387,7 @@ TEST(HybridFactorGraph, Partial_Elimination) {
|
||||||
|
|
||||||
// Create ordering.
|
// Create ordering.
|
||||||
Ordering ordering;
|
Ordering ordering;
|
||||||
for (size_t k = 1; k <= self.K; k++) ordering += X(k);
|
for (size_t k = 0; k < self.K; k++) ordering += X(k);
|
||||||
|
|
||||||
// Eliminate partially.
|
// Eliminate partially.
|
||||||
HybridBayesNet::shared_ptr hybridBayesNet;
|
HybridBayesNet::shared_ptr hybridBayesNet;
|
||||||
|
@@ -397,18 +397,18 @@ TEST(HybridFactorGraph, Partial_Elimination) {
|
||||||
|
|
||||||
CHECK(hybridBayesNet);
|
CHECK(hybridBayesNet);
|
||||||
EXPECT_LONGS_EQUAL(3, hybridBayesNet->size());
|
EXPECT_LONGS_EQUAL(3, hybridBayesNet->size());
|
||||||
EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)});
|
EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(0)});
|
||||||
EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)}));
|
EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(1), M(0)}));
|
||||||
EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)});
|
EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(1)});
|
||||||
EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(1), M(2)}));
|
EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(2), M(0), M(1)}));
|
||||||
EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)});
|
EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(2)});
|
||||||
EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)}));
|
EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(0), M(1)}));
|
||||||
|
|
||||||
CHECK(remainingFactorGraph);
|
CHECK(remainingFactorGraph);
|
||||||
EXPECT_LONGS_EQUAL(3, remainingFactorGraph->size());
|
EXPECT_LONGS_EQUAL(3, remainingFactorGraph->size());
|
||||||
EXPECT(remainingFactorGraph->at(0)->keys() == KeyVector({M(1)}));
|
EXPECT(remainingFactorGraph->at(0)->keys() == KeyVector({M(0)}));
|
||||||
EXPECT(remainingFactorGraph->at(1)->keys() == KeyVector({M(2), M(1)}));
|
EXPECT(remainingFactorGraph->at(1)->keys() == KeyVector({M(1), M(0)}));
|
||||||
EXPECT(remainingFactorGraph->at(2)->keys() == KeyVector({M(1), M(2)}));
|
EXPECT(remainingFactorGraph->at(2)->keys() == KeyVector({M(0), M(1)}));
|
||||||
}
|
}
|
||||||
|
|
||||||
/****************************************************************************
|
/****************************************************************************
|
||||||
|
@@ -427,7 +427,7 @@ TEST(HybridFactorGraph, Full_Elimination) {
|
||||||
{
|
{
|
||||||
// Create ordering.
|
// Create ordering.
|
||||||
Ordering ordering;
|
Ordering ordering;
|
||||||
for (size_t k = 1; k <= self.K; k++) ordering += X(k);
|
for (size_t k = 0; k < self.K; k++) ordering += X(k);
|
||||||
|
|
||||||
// Eliminate partially.
|
// Eliminate partially.
|
||||||
std::tie(hybridBayesNet_partial, remainingFactorGraph_partial) =
|
std::tie(hybridBayesNet_partial, remainingFactorGraph_partial) =
|
||||||
|
@@ -440,15 +440,15 @@ TEST(HybridFactorGraph, Full_Elimination) {
|
||||||
discrete_fg.push_back(df->inner());
|
discrete_fg.push_back(df->inner());
|
||||||
}
|
}
|
||||||
ordering.clear();
|
ordering.clear();
|
||||||
for (size_t k = 1; k < self.K; k++) ordering += M(k);
|
for (size_t k = 0; k < self.K - 1; k++) ordering += M(k);
|
||||||
discreteBayesNet =
|
discreteBayesNet =
|
||||||
*discrete_fg.eliminateSequential(ordering, EliminateForMPE);
|
*discrete_fg.eliminateSequential(ordering, EliminateForMPE);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create ordering.
|
// Create ordering.
|
||||||
Ordering ordering;
|
Ordering ordering;
|
||||||
for (size_t k = 1; k <= self.K; k++) ordering += X(k);
|
for (size_t k = 0; k < self.K; k++) ordering += X(k);
|
||||||
for (size_t k = 1; k < self.K; k++) ordering += M(k);
|
for (size_t k = 0; k < self.K - 1; k++) ordering += M(k);
|
||||||
|
|
||||||
// Eliminate partially.
|
// Eliminate partially.
|
||||||
HybridBayesNet::shared_ptr hybridBayesNet =
|
HybridBayesNet::shared_ptr hybridBayesNet =
|
||||||
|
@@ -456,23 +456,23 @@ TEST(HybridFactorGraph, Full_Elimination) {
|
||||||
|
|
||||||
CHECK(hybridBayesNet);
|
CHECK(hybridBayesNet);
|
||||||
EXPECT_LONGS_EQUAL(5, hybridBayesNet->size());
|
EXPECT_LONGS_EQUAL(5, hybridBayesNet->size());
|
||||||
// p(x1 | x2, m1)
|
// p(x0 | x1, m0)
|
||||||
EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)});
|
EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(0)});
|
||||||
EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)}));
|
EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(1), M(0)}));
|
||||||
// p(x2 | x3, m1, m2)
|
// p(x1 | x2, m0, m1)
|
||||||
EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)});
|
EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(1)});
|
||||||
EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(1), M(2)}));
|
EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(2), M(0), M(1)}));
|
||||||
// p(x3 | m1, m2)
|
// p(x2 | m0, m1)
|
||||||
EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)});
|
EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(2)});
|
||||||
EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)}));
|
EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(0), M(1)}));
|
||||||
// P(m1 | m2)
|
// P(m0 | m1)
|
||||||
EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(1)});
|
EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(0)});
|
||||||
EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(2)}));
|
EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(1)}));
|
||||||
EXPECT(
|
EXPECT(
|
||||||
dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(3)->inner())
|
dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(3)->inner())
|
||||||
->equals(*discreteBayesNet.at(0)));
|
->equals(*discreteBayesNet.at(0)));
|
||||||
// P(m2)
|
// P(m1)
|
||||||
EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(2)});
|
EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(1)});
|
||||||
EXPECT_LONGS_EQUAL(0, hybridBayesNet->at(4)->nrParents());
|
EXPECT_LONGS_EQUAL(0, hybridBayesNet->at(4)->nrParents());
|
||||||
EXPECT(
|
EXPECT(
|
||||||
dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(4)->inner())
|
dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(4)->inner())
|
||||||
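Putting the per-factor checks above together, the full elimination in this test (with 0-based keys) factorizes the joint as

P(x0, x1, x2, m0, m1) = p(x0 | x1, m0) * p(x1 | x2, m0, m1) * p(x2 | m0, m1) * P(m0 | m1) * P(m1),

which is exactly the chain the frontal/parent assertions spell out.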
|
@@ -489,7 +489,7 @@ TEST(HybridFactorGraph, Printing) {
|
||||||
|
|
||||||
// Create ordering.
|
// Create ordering.
|
||||||
Ordering ordering;
|
Ordering ordering;
|
||||||
for (size_t k = 1; k <= self.K; k++) ordering += X(k);
|
for (size_t k = 0; k < self.K; k++) ordering += X(k);
|
||||||
|
|
||||||
// Eliminate partially.
|
// Eliminate partially.
|
||||||
HybridBayesNet::shared_ptr hybridBayesNet;
|
HybridBayesNet::shared_ptr hybridBayesNet;
|
||||||
|
@@ -499,14 +499,37 @@ TEST(HybridFactorGraph, Printing) {
|
||||||
|
|
||||||
string expected_hybridFactorGraph = R"(
|
string expected_hybridFactorGraph = R"(
|
||||||
size: 7
|
size: 7
|
||||||
factor 0: Continuous [x1]
|
factor 0: Continuous [x0]
|
||||||
|
|
||||||
A[x1] = [
|
A[x0] = [
|
||||||
10
|
10
|
||||||
]
|
]
|
||||||
b = [ -10 ]
|
b = [ -10 ]
|
||||||
No noise model
|
No noise model
|
||||||
factor 1: Hybrid [x1 x2; m1]{
|
factor 1: Hybrid [x0 x1; m0]{
|
||||||
|
Choice(m0)
|
||||||
|
0 Leaf :
|
||||||
|
A[x0] = [
|
||||||
|
-1
|
||||||
|
]
|
||||||
|
A[x1] = [
|
||||||
|
1
|
||||||
|
]
|
||||||
|
b = [ -1 ]
|
||||||
|
No noise model
|
||||||
|
|
||||||
|
1 Leaf :
|
||||||
|
A[x0] = [
|
||||||
|
-1
|
||||||
|
]
|
||||||
|
A[x1] = [
|
||||||
|
1
|
||||||
|
]
|
||||||
|
b = [ -0 ]
|
||||||
|
No noise model
|
||||||
|
|
||||||
|
}
|
||||||
|
factor 2: Hybrid [x1 x2; m1]{
|
||||||
Choice(m1)
|
Choice(m1)
|
||||||
0 Leaf :
|
0 Leaf :
|
||||||
A[x1] = [
|
A[x1] = [
|
||||||
|
@@ -529,54 +529,31 @@ factor 1: Hybrid [x1 x2; m1]{
|
||||||
No noise model
|
No noise model
|
||||||
|
|
||||||
}
|
}
|
||||||
factor 2: Hybrid [x2 x3; m2]{
|
factor 3: Continuous [x1]
|
||||||
Choice(m2)
|
|
||||||
0 Leaf :
|
|
||||||
A[x2] = [
|
|
||||||
-1
|
|
||||||
]
|
|
||||||
A[x3] = [
|
|
||||||
1
|
|
||||||
]
|
|
||||||
b = [ -1 ]
|
|
||||||
No noise model
|
|
||||||
|
|
||||||
1 Leaf :
|
A[x1] = [
|
||||||
A[x2] = [
|
10
|
||||||
-1
|
|
||||||
]
|
]
|
||||||
A[x3] = [
|
b = [ -10 ]
|
||||||
1
|
|
||||||
]
|
|
||||||
b = [ -0 ]
|
|
||||||
No noise model
|
No noise model
|
||||||
|
factor 4: Continuous [x2]
|
||||||
}
|
|
||||||
factor 3: Continuous [x2]
|
|
||||||
|
|
||||||
A[x2] = [
|
A[x2] = [
|
||||||
10
|
10
|
||||||
]
|
]
|
||||||
b = [ -10 ]
|
b = [ -10 ]
|
||||||
No noise model
|
No noise model
|
||||||
factor 4: Continuous [x3]
|
factor 5: Discrete [m0]
|
||||||
|
P( m0 ):
|
||||||
A[x3] = [
|
|
||||||
10
|
|
||||||
]
|
|
||||||
b = [ -10 ]
|
|
||||||
No noise model
|
|
||||||
factor 5: Discrete [m1]
|
|
||||||
P( m1 ):
|
|
||||||
Leaf 0.5
|
Leaf 0.5
|
||||||
|
|
||||||
factor 6: Discrete [m2 m1]
|
factor 6: Discrete [m1 m0]
|
||||||
P( m2 | m1 ):
|
P( m1 | m0 ):
|
||||||
Choice(m2)
|
Choice(m1)
|
||||||
0 Choice(m1)
|
0 Choice(m0)
|
||||||
0 0 Leaf 0.33333333
|
0 0 Leaf 0.33333333
|
||||||
0 1 Leaf 0.6
|
0 1 Leaf 0.6
|
||||||
1 Choice(m1)
|
1 Choice(m0)
|
||||||
1 0 Leaf 0.66666667
|
1 0 Leaf 0.66666667
|
||||||
1 1 Leaf 0.4
|
1 1 Leaf 0.4
|
||||||
|
|
||||||
|
@@ -586,71 +586,71 @@ factor 6: Discrete [m2 m1]
|
||||||
// Expected output for hybridBayesNet.
|
// Expected output for hybridBayesNet.
|
||||||
string expected_hybridBayesNet = R"(
|
string expected_hybridBayesNet = R"(
|
||||||
size: 3
|
size: 3
|
||||||
factor 0: Hybrid P( x1 | x2 m1)
|
factor 0: Hybrid P( x0 | x1 m0)
|
||||||
Discrete Keys = (m1, 2),
|
Discrete Keys = (m0, 2),
|
||||||
Choice(m1)
|
Choice(m0)
|
||||||
0 Leaf p(x1 | x2)
|
0 Leaf p(x0 | x1)
|
||||||
R = [ 10.0499 ]
|
R = [ 10.0499 ]
|
||||||
S[x2] = [ -0.0995037 ]
|
S[x1] = [ -0.0995037 ]
|
||||||
d = [ -9.85087 ]
|
d = [ -9.85087 ]
|
||||||
No noise model
|
No noise model
|
||||||
|
|
||||||
1 Leaf p(x1 | x2)
|
1 Leaf p(x0 | x1)
|
||||||
R = [ 10.0499 ]
|
R = [ 10.0499 ]
|
||||||
S[x2] = [ -0.0995037 ]
|
S[x1] = [ -0.0995037 ]
|
||||||
d = [ -9.95037 ]
|
d = [ -9.95037 ]
|
||||||
No noise model
|
No noise model
|
||||||
|
|
||||||
factor 1: Hybrid P( x2 | x3 m1 m2)
|
factor 1: Hybrid P( x1 | x2 m0 m1)
|
||||||
Discrete Keys = (m1, 2), (m2, 2),
|
Discrete Keys = (m0, 2), (m1, 2),
|
||||||
Choice(m2)
|
Choice(m1)
|
||||||
0 Choice(m1)
|
0 Choice(m0)
|
||||||
0 0 Leaf p(x2 | x3)
|
0 0 Leaf p(x1 | x2)
|
||||||
R = [ 10.099 ]
|
R = [ 10.099 ]
|
||||||
S[x3] = [ -0.0990196 ]
|
S[x2] = [ -0.0990196 ]
|
||||||
d = [ -9.99901 ]
|
d = [ -9.99901 ]
|
||||||
No noise model
|
No noise model
|
||||||
|
|
||||||
0 1 Leaf p(x2 | x3)
|
0 1 Leaf p(x1 | x2)
|
||||||
R = [ 10.099 ]
|
R = [ 10.099 ]
|
||||||
S[x3] = [ -0.0990196 ]
|
S[x2] = [ -0.0990196 ]
|
||||||
d = [ -9.90098 ]
|
d = [ -9.90098 ]
|
||||||
No noise model
|
No noise model
|
||||||
|
|
||||||
1 Choice(m1)
|
1 Choice(m0)
|
||||||
1 0 Leaf p(x2 | x3)
|
1 0 Leaf p(x1 | x2)
|
||||||
R = [ 10.099 ]
|
R = [ 10.099 ]
|
||||||
S[x3] = [ -0.0990196 ]
|
S[x2] = [ -0.0990196 ]
|
||||||
d = [ -10.098 ]
|
d = [ -10.098 ]
|
||||||
No noise model
|
No noise model
|
||||||
|
|
||||||
1 1 Leaf p(x2 | x3)
|
1 1 Leaf p(x1 | x2)
|
||||||
R = [ 10.099 ]
|
R = [ 10.099 ]
|
||||||
S[x3] = [ -0.0990196 ]
|
S[x2] = [ -0.0990196 ]
|
||||||
d = [ -10 ]
|
d = [ -10 ]
|
||||||
No noise model
|
No noise model
|
||||||
|
|
||||||
factor 2: Hybrid P( x3 | m1 m2)
|
factor 2: Hybrid P( x2 | m0 m1)
|
||||||
Discrete Keys = (m1, 2), (m2, 2),
|
Discrete Keys = (m0, 2), (m1, 2),
|
||||||
Choice(m2)
|
Choice(m1)
|
||||||
0 Choice(m1)
|
0 Choice(m0)
|
||||||
0 0 Leaf p(x3)
|
0 0 Leaf p(x2)
|
||||||
R = [ 10.0494 ]
|
R = [ 10.0494 ]
|
||||||
d = [ -10.1489 ]
|
d = [ -10.1489 ]
|
||||||
No noise model
|
No noise model
|
||||||
|
|
||||||
0 1 Leaf p(x3)
|
0 1 Leaf p(x2)
|
||||||
R = [ 10.0494 ]
|
R = [ 10.0494 ]
|
||||||
d = [ -10.1479 ]
|
d = [ -10.1479 ]
|
||||||
No noise model
|
No noise model
|
||||||
|
|
||||||
1 Choice(m1)
|
1 Choice(m0)
|
||||||
1 0 Leaf p(x3)
|
1 0 Leaf p(x2)
|
||||||
R = [ 10.0494 ]
|
R = [ 10.0494 ]
|
||||||
d = [ -10.0504 ]
|
d = [ -10.0504 ]
|
||||||
No noise model
|
No noise model
|
||||||
|
|
||||||
1 1 Leaf p(x3)
|
1 1 Leaf p(x2)
|
||||||
R = [ 10.0494 ]
|
R = [ 10.0494 ]
|
||||||
d = [ -10.0494 ]
|
d = [ -10.0494 ]
|
||||||
No noise model
|
No noise model
|
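Editor's aside (not taken from the PR): the expected_hybridBayesNet string above is the printout of the hybrid Bayes net left after eliminating only the continuous keys x0, x1, x2 of the 0-indexed Switching(3) graph. A rough way to reproduce such a printout is sketched below; it assumes the Switching.h test fixture is on the include path and that eliminatePartialSequential is available on HybridGaussianFactorGraph, mirroring the eliminatePartialMultifrontal call used later in these tests.

// Editor's sketch: print the Bayes net over the continuous keys of Switching(3).
// Assumes "Switching.h" (the test fixture) is on the include path and that
// eliminatePartialSequential exists on HybridGaussianFactorGraph.
#include <tuple>

#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/inference/Ordering.h>
#include <gtsam/inference/Symbol.h>

#include "Switching.h"

using namespace gtsam;
using symbol_shorthand::X;

int main() {
  Switching switching(3);

  // Eliminate only the continuous variables x0, x1, x2.
  Ordering ordering;
  ordering.push_back(X(0));
  ordering.push_back(X(1));
  ordering.push_back(X(2));

  HybridBayesNet::shared_ptr hybridBayesNet;
  HybridGaussianFactorGraph::shared_ptr remainingGraph;
  std::tie(hybridBayesNet, remainingGraph) =
      switching.linearizedFactorGraph.eliminatePartialSequential(ordering);

  // Should print P( x0 | x1 m0), P( x1 | x2 m0 m1), P( x2 | m0 m1) as above.
  hybridBayesNet->print("hybridBayesNet:");
  return 0;
}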
@@ -669,7 +669,7 @@ factor 2: Hybrid P( x3 | m1 m2)
 TEST(HybridFactorGraph, DefaultDecisionTree) {
 HybridNonlinearFactorGraph fg;
 
-// Add a prior on pose x1 at the origin.
+// Add a prior on pose x0 at the origin.
 // A prior factor consists of a mean and a noise model (covariance matrix)
 Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin
 auto priorNoise = noiseModel::Diagonal::Sigmas(
 
@@ -55,47 +55,47 @@ TEST(HybridNonlinearISAM, IncrementalElimination) {
 // Create initial factor graph
 // * * *
 // | | |
-// X1 -*- X2 -*- X3
-// \*-M1-*/
-graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X1)
-graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X1, X2 | M1)
-graph1.push_back(switching.nonlinearFactorGraph.at(2)); // P(X2, X3 | M2)
-graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M1)
+// X0 -*- X1 -*- X2
+// \*-M0-*/
+graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X0)
+graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X0, X1 | M0)
+graph1.push_back(switching.nonlinearFactorGraph.at(2)); // P(X1, X2 | M1)
+graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M0)
 
-initial.insert<double>(X(1), 1);
-initial.insert<double>(X(2), 2);
-initial.insert<double>(X(3), 3);
+initial.insert<double>(X(0), 1);
+initial.insert<double>(X(1), 2);
+initial.insert<double>(X(2), 3);
 
 // Run update step
 isam.update(graph1, initial);
 
 // Check that after update we have 3 hybrid Bayes net nodes:
-// P(X1 | X2, M1) and P(X2, X3 | M1, M2), P(M1, M2)
+// P(X0 | X1, M0) and P(X1, X2 | M0, M1), P(M0, M1)
 HybridGaussianISAM bayesTree = isam.bayesTree();
 EXPECT_LONGS_EQUAL(3, bayesTree.size());
-EXPECT(bayesTree[X(1)]->conditional()->frontals() == KeyVector{X(1)});
-EXPECT(bayesTree[X(1)]->conditional()->parents() == KeyVector({X(2), M(1)}));
-EXPECT(bayesTree[X(2)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
-EXPECT(bayesTree[X(2)]->conditional()->parents() == KeyVector({M(1), M(2)}));
+EXPECT(bayesTree[X(0)]->conditional()->frontals() == KeyVector{X(0)});
+EXPECT(bayesTree[X(0)]->conditional()->parents() == KeyVector({X(1), M(0)}));
+EXPECT(bayesTree[X(1)]->conditional()->frontals() == KeyVector({X(1), X(2)}));
+EXPECT(bayesTree[X(1)]->conditional()->parents() == KeyVector({M(0), M(1)}));
 
 /********************************************************/
 // New factor graph for incremental update.
 HybridNonlinearFactorGraph graph2;
 initial = Values();
 
-graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X2)
-graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X3)
-graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M1, M2)
+graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X1)
+graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X2)
+graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M0, M1)
 
 isam.update(graph2, initial);
 
 bayesTree = isam.bayesTree();
 // Check that after the second update we have
 // 1 additional hybrid Bayes net node:
-// P(X2, X3 | M1, M2)
+// P(X1, X2 | M0, M1)
 EXPECT_LONGS_EQUAL(3, bayesTree.size());
-EXPECT(bayesTree[X(3)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
-EXPECT(bayesTree[X(3)]->conditional()->parents() == KeyVector({M(1), M(2)}));
+EXPECT(bayesTree[X(2)]->conditional()->frontals() == KeyVector({X(1), X(2)}));
+EXPECT(bayesTree[X(2)]->conditional()->parents() == KeyVector({M(0), M(1)}));
 }
 
 /* ****************************************************************************/
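Editor's aside (not part of the diff): the .at(i) indices used throughout these ISAM tests rely on a fixed factor layout of the 0-indexed Switching(3) fixture. The sketch below only spells out that assumed layout; Switching.h is the hybrid test fixture and is assumed to be on the include path.

// Editor's sketch: the factor slots of Switching(3) assumed by the tests above.
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>

#include "Switching.h"  // test fixture, assumed to be on the include path

using namespace gtsam;

int main() {
  Switching switching(3);
  const auto& graph = switching.nonlinearFactorGraph;

  // Assumed slot layout, matching the comments in the tests:
  //   0: prior on X(0)
  //   1: hybrid motion model on X(0), X(1), selected by M(0)
  //   2: hybrid motion model on X(1), X(2), selected by M(1)
  //   3: measurement prior on X(1)
  //   4: measurement prior on X(2)
  //   5: discrete prior P(M(0))
  //   6: discrete mode-chain factor on M(0), M(1)
  graph.print("Switching(3) factor slots:");
  return 0;
}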
@@ -109,35 +109,35 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
 // Create initial factor graph
 // * * *
 // | | |
-// X1 -*- X2 -*- X3
+// X0 -*- X1 -*- X2
 // | |
-// *-M1 - * - M2
-graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X1)
-graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X1, X2 | M1)
-graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X2)
-graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M1)
+// *-M0 - * - M1
+graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X0)
+graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X0, X1 | M0)
+graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X1)
+graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M0)
 
-initial.insert<double>(X(1), 1);
-initial.insert<double>(X(2), 2);
+initial.insert<double>(X(0), 1);
+initial.insert<double>(X(1), 2);
 
 // Run update step
 isam.update(graph1, initial);
 HybridGaussianISAM bayesTree = isam.bayesTree();
 
-auto discreteConditional_m1 =
-bayesTree[M(1)]->conditional()->asDiscreteConditional();
-EXPECT(discreteConditional_m1->keys() == KeyVector({M(1)}));
+auto discreteConditional_m0 =
+bayesTree[M(0)]->conditional()->asDiscreteConditional();
+EXPECT(discreteConditional_m0->keys() == KeyVector({M(0)}));
 
 /********************************************************/
 // New factor graph for incremental update.
 HybridNonlinearFactorGraph graph2;
 initial = Values();
 
-initial.insert<double>(X(3), 3);
+initial.insert<double>(X(2), 3);
 
-graph2.push_back(switching.nonlinearFactorGraph.at(2)); // P(X2, X3 | M2)
-graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X3)
-graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M1, M2)
+graph2.push_back(switching.nonlinearFactorGraph.at(2)); // P(X1, X2 | M1)
+graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X2)
+graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M0, M1)
 
 isam.update(graph2, initial);
 bayesTree = isam.bayesTree();
@@ -145,9 +145,9 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
 /********************************************************/
 // Run batch elimination so we can compare results.
 Ordering ordering;
+ordering += X(0);
 ordering += X(1);
 ordering += X(2);
-ordering += X(3);
 
 // Now we calculate the actual factors using full elimination
 HybridBayesTree::shared_ptr expectedHybridBayesTree;
@@ -155,67 +155,67 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
 std::tie(expectedHybridBayesTree, expectedRemainingGraph) =
 switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
 
+// The densities on X(0) should be the same
+auto x0_conditional = dynamic_pointer_cast<GaussianMixture>(
+bayesTree[X(0)]->conditional()->inner());
+auto expected_x0_conditional = dynamic_pointer_cast<GaussianMixture>(
+(*expectedHybridBayesTree)[X(0)]->conditional()->inner());
+EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional));
 
 // The densities on X(1) should be the same
 auto x1_conditional = dynamic_pointer_cast<GaussianMixture>(
 bayesTree[X(1)]->conditional()->inner());
-auto actual_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
+auto expected_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
 (*expectedHybridBayesTree)[X(1)]->conditional()->inner());
-EXPECT(assert_equal(*x1_conditional, *actual_x1_conditional));
+EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional));
 
 // The densities on X(2) should be the same
 auto x2_conditional = dynamic_pointer_cast<GaussianMixture>(
 bayesTree[X(2)]->conditional()->inner());
-auto actual_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
+auto expected_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
 (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
-EXPECT(assert_equal(*x2_conditional, *actual_x2_conditional));
+EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional));
 
-// The densities on X(3) should be the same
-auto x3_conditional = dynamic_pointer_cast<GaussianMixture>(
-bayesTree[X(3)]->conditional()->inner());
-auto actual_x3_conditional = dynamic_pointer_cast<GaussianMixture>(
-(*expectedHybridBayesTree)[X(2)]->conditional()->inner());
-EXPECT(assert_equal(*x3_conditional, *actual_x3_conditional));
 
 // We only perform manual continuous elimination for 0,0.
-// The other discrete probabilities on M(2) are calculated the same way
+// The other discrete probabilities on M(1) are calculated the same way
 Ordering discrete_ordering;
+discrete_ordering += M(0);
 discrete_ordering += M(1);
-discrete_ordering += M(2);
 HybridBayesTree::shared_ptr discreteBayesTree =
 expectedRemainingGraph->eliminateMultifrontal(discrete_ordering);
 
 DiscreteValues m00;
-m00[M(1)] = 0, m00[M(2)] = 0;
+m00[M(0)] = 0, m00[M(1)] = 0;
 DiscreteConditional decisionTree =
-*(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional();
+*(*discreteBayesTree)[M(1)]->conditional()->asDiscreteConditional();
 double m00_prob = decisionTree(m00);
 
 auto discreteConditional =
-bayesTree[M(2)]->conditional()->asDiscreteConditional();
+bayesTree[M(1)]->conditional()->asDiscreteConditional();
 
 // Test if the probability values are as expected with regression tests.
 DiscreteValues assignment;
 EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5));
+assignment[M(0)] = 0;
 assignment[M(1)] = 0;
-assignment[M(2)] = 0;
 EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5));
-assignment[M(1)] = 1;
-assignment[M(2)] = 0;
-EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
+assignment[M(0)] = 1;
 assignment[M(1)] = 0;
-assignment[M(2)] = 1;
-EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
+EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
+assignment[M(0)] = 0;
+assignment[M(1)] = 1;
+EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
+assignment[M(0)] = 1;
 assignment[M(1)] = 1;
-assignment[M(2)] = 1;
 EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5));
 
 // Check if the clique conditional generated from incremental elimination
 // matches that of batch elimination.
 auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal();
 auto expectedConditional = dynamic_pointer_cast<DecisionTreeFactor>(
-(*expectedChordal)[M(2)]->conditional()->inner());
+(*expectedChordal)[M(1)]->conditional()->inner());
 auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>(
-bayesTree[M(2)]->conditional()->inner());
+bayesTree[M(1)]->conditional()->inner());
 EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6));
 }
 
@@ -227,22 +227,22 @@ TEST(HybridNonlinearISAM, Approx_inference) {
 HybridNonlinearFactorGraph graph1;
 Values initial;
 
-// Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
+// Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
 for (size_t i = 1; i < 4; i++) {
 graph1.push_back(switching.nonlinearFactorGraph.at(i));
 }
 
-// Add the Gaussian factors, 1 prior on X(1),
-// 3 measurements on X(2), X(3), X(4)
+// Add the Gaussian factors, 1 prior on X(0),
+// 3 measurements on X(1), X(2), X(3)
 graph1.push_back(switching.nonlinearFactorGraph.at(0));
 for (size_t i = 4; i <= 7; i++) {
 graph1.push_back(switching.nonlinearFactorGraph.at(i));
-initial.insert<double>(X(i - 3), i - 3);
+initial.insert<double>(X(i - 4), i - 3);
 }
 
 // Create ordering.
 Ordering ordering;
-for (size_t j = 1; j <= 4; j++) {
+for (size_t j = 0; j < 4; j++) {
 ordering += X(j);
 }
 
@@ -292,26 +292,26 @@ TEST(HybridNonlinearISAM, Approx_inference) {
 1 1 1 Leaf 0.5
 */
 
-auto discreteConditional_m1 = *dynamic_pointer_cast<DiscreteConditional>(
-bayesTree[M(1)]->conditional()->inner());
-EXPECT(discreteConditional_m1.keys() == KeyVector({M(1), M(2), M(3)}));
+auto discreteConditional_m0 = *dynamic_pointer_cast<DiscreteConditional>(
+bayesTree[M(0)]->conditional()->inner());
+EXPECT(discreteConditional_m0.keys() == KeyVector({M(0), M(1), M(2)}));
 
 // Get the number of elements which are greater than 0.
 auto count = [](const double &value, int count) {
 return value > 0 ? count + 1 : count;
 };
 // Check that the number of leaves after pruning is 5.
-EXPECT_LONGS_EQUAL(5, discreteConditional_m1.fold(count, 0));
+EXPECT_LONGS_EQUAL(5, discreteConditional_m0.fold(count, 0));
 
 // Check that the hybrid nodes of the bayes net match those of the pre-pruning
 // bayes net, at the same positions.
 auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>(
-unprunedHybridBayesTree->clique(X(4))->conditional()->inner());
+unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
 auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>(
-bayesTree[X(4)]->conditional()->inner());
+bayesTree[X(3)]->conditional()->inner());
 
 std::vector<std::pair<DiscreteValues, double>> assignments =
-discreteConditional_m1.enumerate();
+discreteConditional_m0.enumerate();
 // Loop over all assignments and check the pruned components
 for (auto &&av : assignments) {
 const DiscreteValues &assignment = av.first;
@@ -336,18 +336,18 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
 Values initial;
 
 /***** Run Round 1 *****/
-// Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
+// Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
 for (size_t i = 1; i < 4; i++) {
 graph1.push_back(switching.nonlinearFactorGraph.at(i));
 }
 
-// Add the Gaussian factors, 1 prior on X(1),
-// 3 measurements on X(2), X(3), X(4)
+// Add the Gaussian factors, 1 prior on X(0),
+// 3 measurements on X(1), X(2), X(3)
 graph1.push_back(switching.nonlinearFactorGraph.at(0));
-initial.insert<double>(X(1), 1);
+initial.insert<double>(X(0), 1);
 for (size_t i = 5; i <= 7; i++) {
 graph1.push_back(switching.nonlinearFactorGraph.at(i));
-initial.insert<double>(X(i - 3), i - 3);
+initial.insert<double>(X(i - 4), i - 3);
 }
 
 // Run update with pruning
@@ -361,20 +361,20 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
 // each with 2, 4, 8, and 5 (pruned) leaves respectively.
 EXPECT_LONGS_EQUAL(4, bayesTree.size());
 EXPECT_LONGS_EQUAL(
-2, bayesTree[X(1)]->conditional()->asMixture()->nrComponents());
+2, bayesTree[X(0)]->conditional()->asMixture()->nrComponents());
 EXPECT_LONGS_EQUAL(
-3, bayesTree[X(2)]->conditional()->asMixture()->nrComponents());
+3, bayesTree[X(1)]->conditional()->asMixture()->nrComponents());
+EXPECT_LONGS_EQUAL(
+5, bayesTree[X(2)]->conditional()->asMixture()->nrComponents());
 EXPECT_LONGS_EQUAL(
 5, bayesTree[X(3)]->conditional()->asMixture()->nrComponents());
-EXPECT_LONGS_EQUAL(
-5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents());
 
 /***** Run Round 2 *****/
 HybridGaussianFactorGraph graph2;
-graph2.push_back(switching.nonlinearFactorGraph.at(4)); // x4-x5
-graph2.push_back(switching.nonlinearFactorGraph.at(8)); // x5 measurement
+graph2.push_back(switching.nonlinearFactorGraph.at(4)); // x3-x4
+graph2.push_back(switching.nonlinearFactorGraph.at(8)); // x4 measurement
 initial = Values();
-initial.insert<double>(X(5), 5);
+initial.insert<double>(X(4), 5);
 
 // Run update with pruning a second time.
 incrementalHybrid.update(graph2, initial);
@@ -386,9 +386,9 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
 // with 5 (pruned) leaves.
 CHECK_EQUAL(5, bayesTree.size());
 EXPECT_LONGS_EQUAL(
-5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents());
+5, bayesTree[X(3)]->conditional()->asMixture()->nrComponents());
 EXPECT_LONGS_EQUAL(
-5, bayesTree[X(5)]->conditional()->asMixture()->nrComponents());
+5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents());
 }
 
 /* ************************************************************************/
@@ -401,7 +401,7 @@ TEST(HybridNonlinearISAM, NonTrivial) {
 HybridNonlinearFactorGraph fg;
 HybridNonlinearISAM inc;
 
-// Add a prior on pose x1 at the origin.
+// Add a prior on pose x0 at the origin.
 // A prior factor consists of a mean and
 // a noise model (covariance matrix)
 Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin
 