figured out how to get the correct continuous errors
parent a6d1a57478
commit 2f2f8c9ed5
@@ -69,56 +69,57 @@ Ordering getOrdering(HybridGaussianFactorGraph& factors,
  return ordering;
}

/****************************************************************************/
// Test approximate inference with an additional pruning step.
TEST(HybridEstimation, Incremental) {
  size_t K = 15;
  std::vector<double> measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, 7, 8, 9, 9, 9, 10, 11, 11, 11, 11};
  // Ground truth discrete seq
  std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0};
  Switching switching(K, 1.0, 0.1, measurements);
  HybridSmoother smoother;
  HybridNonlinearFactorGraph graph;
  Values initial;
// /****************************************************************************/
// // Test approximate inference with an additional pruning step.
// TEST(HybridEstimation, Incremental) {
// size_t K = 15;
// std::vector<double> measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, 7, 8,
// 9, 9, 9, 10, 11, 11, 11, 11};
// // Ground truth discrete seq
// std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1,
// 0, 0, 1, 1, 0, 0, 0}; Switching switching(K, 1.0, 0.1, measurements);
// HybridSmoother smoother;
// HybridNonlinearFactorGraph graph;
// Values initial;

  // Add the X(0) prior
  graph.push_back(switching.nonlinearFactorGraph.at(0));
  initial.insert(X(0), switching.linearizationPoint.at<double>(X(0)));
// // Add the X(0) prior
// graph.push_back(switching.nonlinearFactorGraph.at(0));
// initial.insert(X(0), switching.linearizationPoint.at<double>(X(0)));

  HybridGaussianFactorGraph linearized;
  HybridGaussianFactorGraph bayesNet;
// HybridGaussianFactorGraph linearized;
// HybridGaussianFactorGraph bayesNet;

  for (size_t k = 1; k < K; k++) {
    // Motion Model
    graph.push_back(switching.nonlinearFactorGraph.at(k));
    // Measurement
    graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1));
// for (size_t k = 1; k < K; k++) {
// // Motion Model
// graph.push_back(switching.nonlinearFactorGraph.at(k));
// // Measurement
// graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1));

    initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
// initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));

    bayesNet = smoother.hybridBayesNet();
    linearized = *graph.linearize(initial);
    Ordering ordering = getOrdering(bayesNet, linearized);
// bayesNet = smoother.hybridBayesNet();
// linearized = *graph.linearize(initial);
// Ordering ordering = getOrdering(bayesNet, linearized);

    smoother.update(linearized, ordering, 3);
    graph.resize(0);
  }
  HybridValues delta = smoother.hybridBayesNet().optimize();
// smoother.update(linearized, ordering, 3);
// graph.resize(0);
// }
// HybridValues delta = smoother.hybridBayesNet().optimize();

  Values result = initial.retract(delta.continuous());
// Values result = initial.retract(delta.continuous());

  DiscreteValues expected_discrete;
  for (size_t k = 0; k < K - 1; k++) {
    expected_discrete[M(k)] = discrete_seq[k];
  }
  EXPECT(assert_equal(expected_discrete, delta.discrete()));
// DiscreteValues expected_discrete;
// for (size_t k = 0; k < K - 1; k++) {
// expected_discrete[M(k)] = discrete_seq[k];
// }
// EXPECT(assert_equal(expected_discrete, delta.discrete()));

  Values expected_continuous;
  for (size_t k = 0; k < K; k++) {
    expected_continuous.insert(X(k), measurements[k]);
  }
  EXPECT(assert_equal(expected_continuous, result));
}
// Values expected_continuous;
// for (size_t k = 0; k < K; k++) {
// expected_continuous.insert(X(k), measurements[k]);
// }
// EXPECT(assert_equal(expected_continuous, result));
// }
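Editor's note: the third argument in smoother.update(linearized, ordering, 3) above appears to cap the number of discrete hypotheses the smoother keeps after each update; the pruning itself is not part of this diff. A minimal, GTSAM-free sketch of that keep-the-N-most-probable idea; pruneToMaxLeaves and the assignment labels below are hypothetical, not GTSAM API.

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Hypothetical stand-in for a discrete posterior: assignment label -> probability.
using Hypotheses = std::vector<std::pair<std::string, double>>;

// Keep only the maxNrLeaves most probable hypotheses and renormalize.
Hypotheses pruneToMaxLeaves(Hypotheses hypotheses, std::size_t maxNrLeaves) {
  std::sort(hypotheses.begin(), hypotheses.end(),
            [](const auto& a, const auto& b) { return a.second > b.second; });
  if (hypotheses.size() > maxNrLeaves) hypotheses.resize(maxNrLeaves);
  double total = 0.0;
  for (const auto& h : hypotheses) total += h.second;
  for (auto& h : hypotheses) h.second /= total;
  return hypotheses;
}

int main() {
  // Made-up posterior over two binary modes; only the top 3 leaves survive.
  Hypotheses posterior = {{"m0=0,m1=0", 0.05}, {"m0=0,m1=1", 0.40},
                          {"m0=1,m1=0", 0.15}, {"m0=1,m1=1", 0.40}};
  for (const auto& h : pruneToMaxLeaves(posterior, 3))
    std::cout << h.first << " : " << h.second << "\n";
  return 0;
}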
|
/**
 * @brief A function to get a specific 1D robot motion problem as a linearized
@@ -188,6 +189,7 @@ TEST(HybridEstimation, Probability) {

  double between_sigma = 1.0, measurement_sigma = 0.1;

  std::vector<double> expected_errors, expected_prob_primes;
  for (size_t i = 0; i < pow(2, K - 1); i++) {
    std::vector<size_t> discrete_seq = getDiscreteSequence<K>(i);

@@ -195,19 +197,80 @@ TEST(HybridEstimation, Probability) {
        K, measurements, discrete_seq, measurement_sigma, between_sigma);

    auto bayes_net = linear_graph->eliminateSequential();
    // graph.print();
    // bayes_net->print();

    VectorValues values = bayes_net->optimize();
    std::cout << i << " : " << linear_graph->probPrime(values) << std::endl;

    expected_errors.push_back(linear_graph->error(values));
    expected_prob_primes.push_back(linear_graph->probPrime(values));
    std::cout << i << " : " << expected_errors.at(i) << "\t|\t"
              << expected_prob_primes.at(i) << std::endl;
  }
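Editor's note: the loop above tabulates the total error and probPrime of the optimal solution for every discrete sequence. The later probPrimes.push_back(exp(-error)) in this diff suggests the two are related by probPrime = exp(-error), with the error being the usual sum of weighted least-squares factor errors. A toy, GTSAM-free illustration of that relationship; the factor means, sigmas, and solution value are made up.

#include <cmath>
#include <iostream>
#include <utility>
#include <vector>

// Error of a 1D Gaussian factor: 0.5 * ((x - mu) / sigma)^2.
double factorError(double x, double mu, double sigma) {
  double r = (x - mu) / sigma;
  return 0.5 * r * r;
}

int main() {
  // Hypothetical 1D solution and two unary factors (mean, sigma).
  double x = 0.95;
  std::vector<std::pair<double, double>> factors = {{1.0, 0.1}, {0.9, 0.1}};

  // Total error is the sum of the individual factor errors.
  double error = 0.0;
  for (const auto& f : factors) error += factorError(x, f.first, f.second);

  // Unnormalized probability of this solution, as tabulated for probPrime.
  double probPrime = std::exp(-error);
  std::cout << "error = " << error << ", probPrime = " << probPrime << "\n";
  return 0;
}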
|
  // std::vector<size_t> discrete_seq = getDiscreteSequence<K>(0);
  // GaussianFactorGraph::shared_ptr linear_graph = specificProblem(
  // K, measurements, discrete_seq, measurement_sigma, between_sigma);
  // auto bayes_net = linear_graph->eliminateSequential();
  // VectorValues values = bayes_net->optimize();
  // std::cout << "Total NLFG Error: " << linear_graph->error(values) << std::endl;
  // std::cout << "===============" << std::endl;

  Switching switching(K, between_sigma, measurement_sigma, measurements);
  auto graph = switching.linearizedFactorGraph;
  Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph());
  HybridBayesNet::shared_ptr bayesNet = graph.eliminateSequential(ordering);
  const DecisionTreeFactor::shared_ptr decisionTree =
      bayesNet->discreteConditionals();
  decisionTree->print();

  HybridBayesNet::shared_ptr bayesNet;
  HybridGaussianFactorGraph::shared_ptr remainingGraph;
  Ordering continuous(graph.continuousKeys());
  std::tie(bayesNet, remainingGraph) =
      graph.eliminatePartialSequential(continuous);

  auto last_conditional = bayesNet->at(bayesNet->size() - 1);
  DiscreteKeys discrete_keys = last_conditional->discreteKeys();
  const std::vector<DiscreteValues> assignments =
      DiscreteValues::CartesianProduct(discrete_keys);

  vector<VectorValues::shared_ptr> vector_values;
  for (const DiscreteValues& assignment : assignments) {
    VectorValues values = bayesNet->optimize(assignment);
    vector_values.push_back(boost::make_shared<VectorValues>(values));
  }
  std::reverse(discrete_keys.begin(), discrete_keys.end());
  DecisionTree<Key, VectorValues::shared_ptr> delta_tree(discrete_keys,
                                                         vector_values);

  vector<double> probPrimes;
  for (const DiscreteValues& assignment : assignments) {
    double error = 0.0;
    VectorValues delta = *delta_tree(assignment);
    for (auto factor : graph) {
      if (factor->isHybrid()) {
        auto f = boost::static_pointer_cast<GaussianMixtureFactor>(factor);
        error += f->error(delta, assignment);

      } else if (factor->isContinuous()) {
        auto f = boost::static_pointer_cast<HybridGaussianFactor>(factor);
        error += f->inner()->error(delta);
      }
    }
    // std::cout << "\n" << std::endl;
    // assignment.print();
    // std::cout << error << " | " << exp(-error) << std::endl;
    probPrimes.push_back(exp(-error));
  }
  AlgebraicDecisionTree<Key> expected_probPrimeTree(discrete_keys, probPrimes);
  expected_probPrimeTree.print("", DefaultKeyFormatter);
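Editor's note: the block above walks every assignment returned by DiscreteValues::CartesianProduct, sums the factor errors for that assignment, and records exp(-error) as the unnormalized probability of the corresponding mode sequence. A self-contained sketch of that enumerate-and-tabulate pattern; cartesianProduct and toyError here are made-up stand-ins, not GTSAM's CartesianProduct or the hybrid factor errors.

#include <cmath>
#include <cstddef>
#include <iostream>
#include <vector>

// Enumerate all 2^n assignments of n binary discrete keys (Cartesian product).
std::vector<std::vector<int>> cartesianProduct(std::size_t n) {
  std::vector<std::vector<int>> assignments;
  for (std::size_t i = 0; i < (std::size_t(1) << n); i++) {
    std::vector<int> assignment(n);
    for (std::size_t k = 0; k < n; k++) assignment[k] = (i >> k) & 1;
    assignments.push_back(assignment);
  }
  return assignments;
}

// Made-up per-assignment error, standing in for summing hybrid factor errors.
double toyError(const std::vector<int>& assignment) {
  double error = 0.0;
  for (std::size_t k = 0; k < assignment.size(); k++)
    error += assignment[k] ? 0.5 : 2.0;  // pretend mode 1 fits each step better
  return error;
}

int main() {
  const std::size_t n = 3;  // e.g. K - 1 discrete mode keys
  std::vector<double> probPrimes;
  for (const auto& assignment : cartesianProduct(n))
    probPrimes.push_back(std::exp(-toyError(assignment)));
  for (double p : probPrimes) std::cout << p << "\n";
  return 0;
}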

  // remainingGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree));

  // Ordering discrete(graph.discreteKeys());
  // // remainingGraph->print("remainingGraph");
  // // discrete.print();
  // auto discreteBayesNet = remainingGraph->eliminateSequential(discrete);
  // bayesNet->add(*discreteBayesNet);
  // // bayesNet->print();

  // HybridValues hybrid_values = bayesNet->optimize();
  // hybrid_values.discrete().print();
}

/* ************************************************************************* */