Restrict dangerous flat access

release/4.3a0
Frank Dellaert 2024-10-22 15:39:14 -07:00
parent 5145b8c47a
commit 777f0d25ad
4 changed files with 102 additions and 104 deletions
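In short: the fixture's combined factor graph becomes private, exposed only through a const accessor, and the tests now locate factors through the categorized sub-graphs unaryFactors, binaryFactors, and modeChain instead of flat indices. A minimal before/after sketch of the intended test usage, assuming the members introduced in the diff below (the indices mirror the smoother loops changed in this commit):

// Before: factors addressed by flat index into the combined graph; fragile,
// since any change in insertion order silently shifts the indices.
graph.push_back(switching.nonlinearFactorGraph.at(k));          // motion model
graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1));  // measurement

// After: factors addressed by category, independent of insertion order.
graph.push_back(switching.binaryFactors.at(k - 1));  // motion model ϕ(X(k-1),X(k),M(k-1))
graph.push_back(switching.unaryFactors.at(k));       // measurement ϕ(X(k);z_k)
if (k > 1) graph.push_back(switching.modeChain.at(k - 1));  // mode chain factor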

View File

@@ -120,12 +120,21 @@ using MotionModel = BetweenFactor<double>;
// Test fixture with switching network.
/// ϕ(X(0)) .. ϕ(X(k),X(k+1)) .. ϕ(X(k);z_k) .. ϕ(M(0)) .. ϕ(M(K-3),M(K-2))
struct Switching {
private:
HybridNonlinearFactorGraph nonlinearFactorGraph_;
public:
size_t K;
DiscreteKeys modes;
HybridNonlinearFactorGraph nonlinearFactorGraph;
HybridNonlinearFactorGraph unaryFactors, binaryFactors, modeChain;
HybridGaussianFactorGraph linearizedFactorGraph;
Values linearizationPoint;
// Read-only access to the full (flat) nonlinear factor graph; prefer the categorized members for indexing individual factors.
const HybridNonlinearFactorGraph &nonlinearFactorGraph() const {
return nonlinearFactorGraph_;
}
/**
* @brief Create with given number of time steps.
*
@@ -136,7 +145,7 @@ struct Switching {
*/
Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1,
std::vector<double> measurements = {},
std::string discrete_transition_prob = "1/2 3/2")
std::string transitionProbabilityTable = "1/2 3/2")
: K(K) {
using noiseModel::Isotropic;
@@ -155,32 +164,36 @@ struct Switching {
// Create hybrid factor graph.
// Add a prior ϕ(X(0)) on X(0).
nonlinearFactorGraph.emplace_shared<PriorFactor<double>>(
nonlinearFactorGraph_.emplace_shared<PriorFactor<double>>(
X(0), measurements.at(0), Isotropic::Sigma(1, prior_sigma));
unaryFactors.push_back(nonlinearFactorGraph_.back());
// Add "motion models" ϕ(X(k),X(k+1),M(k)).
for (size_t k = 0; k < K - 1; k++) {
auto motion_models = motionModels(k, between_sigma);
nonlinearFactorGraph.emplace_shared<HybridNonlinearFactor>(modes[k],
nonlinearFactorGraph_.emplace_shared<HybridNonlinearFactor>(modes[k],
motion_models);
binaryFactors.push_back(nonlinearFactorGraph_.back());
}
// Add measurement factors ϕ(X(k);z_k).
auto measurement_noise = Isotropic::Sigma(1, prior_sigma);
for (size_t k = 1; k < K; k++) {
nonlinearFactorGraph.emplace_shared<PriorFactor<double>>(
nonlinearFactorGraph_.emplace_shared<PriorFactor<double>>(
X(k), measurements.at(k), measurement_noise);
unaryFactors.push_back(nonlinearFactorGraph_.back());
}
// Add "mode chain" ϕ(M(0)) ϕ(M(0),M(1)) ... ϕ(M(K-3),M(K-2))
addModeChain(&nonlinearFactorGraph, discrete_transition_prob);
modeChain = createModeChain(transitionProbabilityTable);
nonlinearFactorGraph_ += modeChain;
// Create the linearization point.
for (size_t k = 0; k < K; k++) {
linearizationPoint.insert<double>(X(k), static_cast<double>(k + 1));
}
linearizedFactorGraph = *nonlinearFactorGraph.linearize(linearizationPoint);
linearizedFactorGraph = *nonlinearFactorGraph_.linearize(linearizationPoint);
}
// Create motion models for a given time step
@@ -200,15 +213,16 @@ struct Switching {
*
* @param transitionProbabilityTable The string specifying the discrete transition probabilities.
* @return The factor graph containing the mode chain.
*/
template <typename FACTORGRAPH>
void addModeChain(FACTORGRAPH *fg,
std::string discrete_transition_prob = "1/2 3/2") {
fg->template emplace_shared<DiscreteDistribution>(modes[0], "1/1");
HybridNonlinearFactorGraph createModeChain(
std::string transitionProbabilityTable = "1/2 3/2") {
HybridNonlinearFactorGraph chain;
chain.emplace_shared<DiscreteDistribution>(modes[0], "1/1");
for (size_t k = 0; k < K - 2; k++) {
auto parents = {modes[k]};
fg->template emplace_shared<DiscreteConditional>(
modes[k + 1], parents, discrete_transition_prob);
chain.emplace_shared<DiscreteConditional>(modes[k + 1], parents,
transitionProbabilityTable);
}
return chain;
}
};
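For reference, a hypothetical unit test (not part of this commit) sketching how the refactored fixture is meant to be queried; the factor counts follow from Switching(3) as used in the existing tests:

/* ****************************************************************************/
TEST(Switching, CategorizedAccess) {
  Switching self(3);  // prior + 2 measurements, 2 motion models, 2 mode-chain factors

  // The combined graph is reachable only through the const accessor.
  const HybridNonlinearFactorGraph &all = self.nonlinearFactorGraph();
  EXPECT_LONGS_EQUAL(7, all.size());

  // The categorized members partition the same seven factors.
  EXPECT_LONGS_EQUAL(3, self.unaryFactors.size());
  EXPECT_LONGS_EQUAL(2, self.binaryFactors.size());
  EXPECT_LONGS_EQUAL(2, self.modeChain.size());
}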

View File

@@ -37,6 +37,8 @@
// Include for test suite
#include <CppUnitLite/TestHarness.h>
#include <string>
#include "Switching.h"
using namespace std;
@@ -55,13 +57,16 @@ std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0,
Switching InitializeEstimationProblem(
const size_t K, const double between_sigma, const double measurement_sigma,
const std::vector<double>& measurements,
const std::string& discrete_transition_prob,
const std::string& transitionProbabilityTable,
HybridNonlinearFactorGraph& graph, Values& initial) {
Switching switching(K, between_sigma, measurement_sigma, measurements,
discrete_transition_prob);
transitionProbabilityTable);
// Add prior on M(0)
graph.push_back(switching.modeChain.at(0));
// Add the X(0) prior
graph.push_back(switching.nonlinearFactorGraph.at(0));
graph.push_back(switching.unaryFactors.at(0));
initial.insert(X(0), switching.linearizationPoint.at<double>(X(0)));
return switching;
@@ -128,10 +133,9 @@ TEST(HybridEstimation, IncrementalSmoother) {
constexpr size_t maxNrLeaves = 3;
for (size_t k = 1; k < K; k++) {
// Motion Model
graph.push_back(switching.nonlinearFactorGraph.at(k));
// Measurement
graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1));
if (k > 1) graph.push_back(switching.modeChain.at(k - 1)); // Mode chain
graph.push_back(switching.binaryFactors.at(k - 1)); // Motion Model
graph.push_back(switching.unaryFactors.at(k)); // Measurement
initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
@@ -176,10 +180,9 @@ TEST(HybridEstimation, ValidPruningError) {
constexpr size_t maxNrLeaves = 3;
for (size_t k = 1; k < K; k++) {
// Motion Model
graph.push_back(switching.nonlinearFactorGraph.at(k));
// Measurement
graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1));
if (k > 1) graph.push_back(switching.modeChain.at(k - 1)); // Mode chain
graph.push_back(switching.binaryFactors.at(k - 1)); // Motion Model
graph.push_back(switching.unaryFactors.at(k)); // Measurement
initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
@@ -225,15 +228,17 @@ TEST(HybridEstimation, ISAM) {
HybridGaussianFactorGraph linearized;
const size_t maxNrLeaves = 3;
for (size_t k = 1; k < K; k++) {
// Motion Model
graph.push_back(switching.nonlinearFactorGraph.at(k));
// Measurement
graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1));
if (k > 1) graph.push_back(switching.modeChain.at(k - 1)); // Mode chain
graph.push_back(switching.binaryFactors.at(k - 1)); // Motion Model
graph.push_back(switching.unaryFactors.at(k)); // Measurement
initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
isam.update(graph, initial, 3);
isam.update(graph, initial, maxNrLeaves);
// isam.saveGraph("NLiSAM" + std::to_string(k) + ".dot");
// GTSAM_PRINT(isam);
graph.resize(0);
initial.clear();

View File

@@ -216,8 +216,8 @@ TEST(HybridNonlinearFactorGraph, PushBack) {
TEST(HybridNonlinearFactorGraph, ErrorTree) {
Switching s(3);
HybridNonlinearFactorGraph graph = s.nonlinearFactorGraph;
Values values = s.linearizationPoint;
const HybridNonlinearFactorGraph &graph = s.nonlinearFactorGraph();
const Values &values = s.linearizationPoint;
auto error_tree = graph.errorTree(s.linearizationPoint);
@@ -248,7 +248,7 @@ TEST(HybridNonlinearFactorGraph, ErrorTree) {
TEST(HybridNonlinearFactorGraph, Switching) {
Switching self(3);
EXPECT_LONGS_EQUAL(7, self.nonlinearFactorGraph.size());
EXPECT_LONGS_EQUAL(7, self.nonlinearFactorGraph().size());
EXPECT_LONGS_EQUAL(7, self.linearizedFactorGraph.size());
}
@@ -260,7 +260,7 @@ TEST(HybridNonlinearFactorGraph, Linearization) {
// Linearize here:
HybridGaussianFactorGraph actualLinearized =
*self.nonlinearFactorGraph.linearize(self.linearizationPoint);
*self.nonlinearFactorGraph().linearize(self.linearizationPoint);
EXPECT_LONGS_EQUAL(7, actualLinearized.size());
}
@@ -409,7 +409,7 @@ TEST(HybridNonlinearFactorGraph, Partial_Elimination) {
/* ****************************************************************************/
TEST(HybridNonlinearFactorGraph, Error) {
Switching self(3);
HybridNonlinearFactorGraph fg = self.nonlinearFactorGraph;
HybridNonlinearFactorGraph fg = self.nonlinearFactorGraph();
{
HybridValues values(VectorValues(), DiscreteValues{{M(0), 0}, {M(1), 0}},
@@ -441,8 +441,9 @@ TEST(HybridNonlinearFactorGraph, Error) {
TEST(HybridNonlinearFactorGraph, PrintErrors) {
Switching self(3);
// Get nonlinear factor graph and add linear factors to be holistic
HybridNonlinearFactorGraph fg = self.nonlinearFactorGraph;
// Get nonlinear factor graph and add linear factors to be holistic.
// TODO(Frank): ???
HybridNonlinearFactorGraph fg = self.nonlinearFactorGraph();
fg.add(self.linearizedFactorGraph);
// Optimize to get HybridValues

View File

@@ -57,10 +57,10 @@ TEST(HybridNonlinearISAM, IncrementalElimination) {
// | | |
// X0 -*- X1 -*- X2
// \*-M0-*/
graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X0)
graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X0, X1 | M0)
graph1.push_back(switching.nonlinearFactorGraph.at(2)); // P(X1, X2 | M1)
graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M0)
graph1.push_back(switching.unaryFactors.at(0)); // P(X0)
graph1.push_back(switching.binaryFactors.at(0)); // P(X0, X1 | M0)
graph1.push_back(switching.binaryFactors.at(1)); // P(X1, X2 | M1)
graph1.push_back(switching.modeChain.at(0)); // P(M0)
initial.insert<double>(X(0), 1);
initial.insert<double>(X(1), 2);
@@ -83,9 +83,9 @@ TEST(HybridNonlinearISAM, IncrementalElimination) {
HybridNonlinearFactorGraph graph2;
initial = Values();
graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X1)
graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X2)
graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M0, M1)
graph1.push_back(switching.unaryFactors.at(1)); // P(X1)
graph2.push_back(switching.unaryFactors.at(2)); // P(X2)
graph2.push_back(switching.modeChain.at(1)); // P(M0, M1)
isam.update(graph2, initial);
@@ -112,10 +112,10 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
// X0 -*- X1 -*- X2
// | |
// *-M0 - * - M1
graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X0)
graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X0, X1 | M0)
graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X1)
graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M0)
graph1.push_back(switching.unaryFactors.at(0)); // P(X0)
graph1.push_back(switching.binaryFactors.at(0)); // P(X0, X1 | M0)
graph1.push_back(switching.unaryFactors.at(1)); // P(X1)
graph1.push_back(switching.modeChain.at(0)); // P(M0)
initial.insert<double>(X(0), 1);
initial.insert<double>(X(1), 2);
@@ -134,9 +134,9 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
initial.insert<double>(X(2), 3);
graph2.push_back(switching.nonlinearFactorGraph.at(2)); // P(X1, X2 | M1)
graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X2)
graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M0, M1)
graph2.push_back(switching.binaryFactors.at(1)); // P(X1, X2 | M1)
graph2.push_back(switching.unaryFactors.at(2)); // P(X2)
graph2.push_back(switching.modeChain.at(1)); // P(M0, M1)
isam.update(graph2, initial);
bayesTree = isam.bayesTree();
@@ -175,46 +175,22 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional));
// We only perform manual continuous elimination for 0,0.
// The other discrete probabilities on M(1) are calculated the same way
// The other discrete probabilities on M(2) are calculated the same way
const Ordering discreteOrdering{M(0), M(1)};
HybridBayesTree::shared_ptr discreteBayesTree =
expectedRemainingGraph->BaseEliminateable::eliminateMultifrontal(
discreteOrdering);
DiscreteValues m00;
m00[M(0)] = 0, m00[M(1)] = 0;
DiscreteConditional decisionTree =
*(*discreteBayesTree)[M(1)]->conditional()->asDiscrete();
double m00_prob = decisionTree(m00);
auto discreteConditional = bayesTree[M(1)]->conditional()->asDiscrete();
expectedRemainingGraph->eliminateMultifrontal(discreteOrdering);
// Test the probability values with regression tests.
DiscreteValues assignment;
EXPECT(assert_equal(0.0952922, m00_prob, 1e-5));
assignment[M(0)] = 0;
assignment[M(1)] = 0;
EXPECT(assert_equal(0.0952922, (*discreteConditional)(assignment), 1e-5));
assignment[M(0)] = 1;
assignment[M(1)] = 0;
EXPECT(assert_equal(0.282758, (*discreteConditional)(assignment), 1e-5));
assignment[M(0)] = 0;
assignment[M(1)] = 1;
EXPECT(assert_equal(0.314175, (*discreteConditional)(assignment), 1e-5));
assignment[M(0)] = 1;
assignment[M(1)] = 1;
EXPECT(assert_equal(0.307775, (*discreteConditional)(assignment), 1e-5));
auto discrete = bayesTree[M(1)]->conditional()->asDiscrete();
EXPECT(assert_equal(0.095292, (*discrete)({{M(0), 0}, {M(1), 0}}), 1e-5));
EXPECT(assert_equal(0.282758, (*discrete)({{M(0), 1}, {M(1), 0}}), 1e-5));
EXPECT(assert_equal(0.314175, (*discrete)({{M(0), 0}, {M(1), 1}}), 1e-5));
EXPECT(assert_equal(0.307775, (*discrete)({{M(0), 1}, {M(1), 1}}), 1e-5));
// Check if the clique conditional generated from incremental elimination
// Check that the clique conditional generated from incremental elimination
// matches that of batch elimination.
auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal();
auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>(
bayesTree[M(1)]->conditional()->inner());
// Account for the probability terms from evaluating continuous FGs
DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}};
vector<double> probs = {0.095292197, 0.31417524, 0.28275772, 0.30777485};
auto expectedConditional =
std::make_shared<DecisionTreeFactor>(discrete_keys, probs);
auto expectedConditional = (*discreteBayesTree)[M(1)]->conditional();
auto actualConditional = bayesTree[M(1)]->conditional();
EXPECT(assert_equal(*expectedConditional, *actualConditional, 1e-6));
}
@@ -227,18 +203,19 @@ TEST(HybridNonlinearISAM, Approx_inference) {
Values initial;
// Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
for (size_t i = 1; i < 4; i++) {
graph1.push_back(switching.nonlinearFactorGraph.at(i));
for (size_t i = 0; i < 3; i++) {
graph1.push_back(switching.binaryFactors.at(i));
}
// Add the Gaussian factors, 1 prior on X(0),
// 3 measurements on X(1), X(2), X(3)
graph1.push_back(switching.nonlinearFactorGraph.at(0));
for (size_t i = 4; i <= 7; i++) {
graph1.push_back(switching.nonlinearFactorGraph.at(i));
initial.insert<double>(X(i - 4), i - 3);
for (size_t i = 0; i < 4; i++) {
graph1.push_back(switching.unaryFactors.at(i));
initial.insert<double>(X(i), i + 1);
}
// TODO(Frank): no mode chain?
// Create ordering.
Ordering ordering;
for (size_t j = 0; j < 4; j++) {
@@ -246,7 +223,7 @@ TEST(HybridNonlinearISAM, Approx_inference) {
}
// Now we calculate the actual factors using full elimination
const auto [unprunedHybridBayesTree, unprunedRemainingGraph] =
const auto [unPrunedHybridBayesTree, unPrunedRemainingGraph] =
switching.linearizedFactorGraph
.BaseEliminateable::eliminatePartialMultifrontal(ordering);
@@ -257,7 +234,7 @@ TEST(HybridNonlinearISAM, Approx_inference) {
bayesTree.prune(maxNrLeaves);
/*
unpruned factor is:
unPruned factor is:
Choice(m3)
0 Choice(m2)
0 0 Choice(m1)
@@ -303,8 +280,8 @@ TEST(HybridNonlinearISAM, Approx_inference) {
// Check that the hybrid nodes of the bayes net match those of the pre-pruning
// bayes net, at the same positions.
auto &unprunedLastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
auto &unPrunedLastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
unPrunedHybridBayesTree->clique(X(3))->conditional()->inner());
auto &lastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
bayesTree[X(3)]->conditional()->inner());
@@ -319,7 +296,7 @@ TEST(HybridNonlinearISAM, Approx_inference) {
EXPECT(lastDensity(assignment) == nullptr);
} else {
CHECK(lastDensity(assignment));
EXPECT(assert_equal(*unprunedLastDensity(assignment),
EXPECT(assert_equal(*unPrunedLastDensity(assignment),
*lastDensity(assignment)));
}
}
@@ -335,19 +312,20 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
/***** Run Round 1 *****/
// Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
for (size_t i = 1; i < 4; i++) {
graph1.push_back(switching.nonlinearFactorGraph.at(i));
for (size_t i = 0; i < 3; i++) {
graph1.push_back(switching.binaryFactors.at(i));
}
// Add the Gaussian factors, 1 prior on X(0),
// 3 measurements on X(1), X(2), X(3)
graph1.push_back(switching.nonlinearFactorGraph.at(0));
initial.insert<double>(X(0), 1);
for (size_t i = 5; i <= 7; i++) {
graph1.push_back(switching.nonlinearFactorGraph.at(i));
initial.insert<double>(X(i - 4), i - 3);
for (size_t i = 0; i < 4; i++) {
graph1.push_back(switching.unaryFactors.at(i));
initial.insert<double>(X(i), i + 1);
}
// TODO(Frank): no mode chain?
// Run update with pruning
size_t maxComponents = 5;
incrementalHybrid.update(graph1, initial);
@@ -368,8 +346,8 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
/***** Run Round 2 *****/
HybridGaussianFactorGraph graph2;
graph2.push_back(switching.nonlinearFactorGraph.at(4)); // x3-x4
graph2.push_back(switching.nonlinearFactorGraph.at(8)); // x4 measurement
graph2.push_back(switching.binaryFactors.at(3)); // x3-x4
graph2.push_back(switching.unaryFactors.at(4)); // x4 measurement
initial = Values();
initial.insert<double>(X(4), 5);