add code for simplified hybrid estimation
parent 8c10cd554d
commit b4c70f2ef9
@@ -55,6 +55,52 @@ namespace gtsam {
 template class EliminateableFactorGraph<HybridGaussianFactorGraph>;
 
+std::string GTDKeyFormatter(const Key &key) {
+  constexpr size_t kMax_uchar_ = std::numeric_limits<uint8_t>::max();
+  constexpr size_t key_bits = sizeof(gtsam::Key) * 8;
+  constexpr size_t ch1_bits = sizeof(uint8_t) * 8;
+  constexpr size_t ch2_bits = sizeof(uint8_t) * 8;
+  constexpr size_t link_bits = sizeof(uint8_t) * 8;
+  constexpr size_t joint_bits = sizeof(uint8_t) * 8;
+  constexpr size_t time_bits =
+      key_bits - ch1_bits - ch2_bits - link_bits - joint_bits;
+
+  constexpr gtsam::Key ch1_mask = gtsam::Key(kMax_uchar_)
+                                  << (key_bits - ch1_bits);
+  constexpr gtsam::Key ch2_mask = gtsam::Key(kMax_uchar_)
+                                  << (key_bits - ch1_bits - ch2_bits);
+  constexpr gtsam::Key link_mask = gtsam::Key(kMax_uchar_)
+                                   << (time_bits + joint_bits);
+  constexpr gtsam::Key joint_mask = gtsam::Key(kMax_uchar_) << time_bits;
+  constexpr gtsam::Key time_mask =
+      ~(ch1_mask | ch2_mask | link_mask | joint_mask);
+
+  uint8_t c1_, c2_, link_idx_, joint_idx_;
+  uint64_t t_;
+
+  c1_ = (uint8_t)((key & ch1_mask) >> (key_bits - ch1_bits));
+  c2_ = (uint8_t)((key & ch2_mask) >> (key_bits - ch1_bits - ch2_bits));
+  link_idx_ = (uint8_t)((key & link_mask) >> (time_bits + joint_bits));
+  joint_idx_ = (uint8_t)((key & joint_mask) >> time_bits);
+  t_ = key & time_mask;
+
+  std::string s = "";
+  if (c1_ != 0) {
+    s += c1_;
+  }
+  if (c2_ != 0) {
+    s += c2_;
+  }
+  if (link_idx_ != kMax_uchar_) {
+    s += "[" + std::to_string((int)(link_idx_)) + "]";
+  }
+  if (joint_idx_ != kMax_uchar_) {
+    s += "(" + std::to_string((int)(joint_idx_)) + ")";
+  }
+  s += std::to_string(t_);
+  return s;
+}
+
 /* ************************************************************************ */
 static GaussianMixtureFactor::Sum &addGaussian(
     GaussianMixtureFactor::Sum &sum, const GaussianFactor::shared_ptr &factor) {
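For context, GTDKeyFormatter above unpacks a 64-bit gtsam::Key that encodes two ASCII characters, a link index, a joint index, and a 32-bit time index (the key layout used by GTDynamics). Below is a minimal sketch of the matching encoder, assuming that layout; the helper name makeGTDKey is hypothetical and not part of this commit:

// Minimal sketch, assuming the bit layout decoded above (not from this
// commit; makeGTDKey is a hypothetical helper).
// Bits 56-63: c1, 48-55: c2, 40-47: link index, 32-39: joint index,
// 0-31: time index.
gtsam::Key makeGTDKey(char c1, char c2, uint8_t link_idx, uint8_t joint_idx,
                      uint32_t t) {
  return (gtsam::Key(uint8_t(c1)) << 56) | (gtsam::Key(uint8_t(c2)) << 48) |
         (gtsam::Key(link_idx) << 40) | (gtsam::Key(joint_idx) << 32) |
         gtsam::Key(t);
}

Under this encoding, GTDKeyFormatter(makeGTDKey('q', 0, 3, 255, 7)) would print "q[3]7": the joint index equals kMax_uchar_, which the formatter treats as unset and skips.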
@@ -213,10 +259,10 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
   result = EliminatePreferCholesky(graph, frontalKeys);
 
   if (keysOfEliminated.empty()) {
-    keysOfEliminated =
-        result.first->keys();  // Initialize the keysOfEliminated to be the
+    // Initialize the keysOfEliminated to be the keys of the
+    // eliminated GaussianConditional
+    keysOfEliminated = result.first->keys();
   }
-  // keysOfEliminated of the GaussianConditional
   if (keysOfSeparator.empty()) {
     keysOfSeparator = result.second->keys();
   }
@@ -237,13 +283,18 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
 
   // If there are no more continuous parents, then we should create here a
   // DiscreteFactor, with the error for each discrete choice.
+  // frontalKeys.print();
+  // separatorFactors.print("", GTDKeyFormatter, [](const GaussianFactor::shared_ptr &factor) { factor->print(); return ""; });
+  // std::cout << "=====================================" << std::endl;
   if (keysOfSeparator.empty()) {
-    VectorValues empty_values;
+    VectorValues empty_values;  // TODO(Varun) Shouldn't this be from the optimized leaf?
     auto factorError = [&](const GaussianFactor::shared_ptr &factor) {
       if (!factor) return 0.0;  // TODO(fan): does this make sense?
       return exp(-factor->error(empty_values));
     };
     DecisionTree<Key, double> fdt(separatorFactors, factorError);
+    std::cout << "\n\nFactor Decision Tree" << std::endl;
+    fdt.print("", GTDKeyFormatter, [](double x) { return std::to_string(x); });
     auto discreteFactor =
         boost::make_shared<DecisionTreeFactor>(discreteSeparator, fdt);
 
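To make the error-to-probability conversion in the factorError lambda above concrete: for a Gaussian factor with error 0.5 * ||Ax - b||^2, each leaf of the resulting DecisionTreeFactor holds the unnormalized probability exp(-error). Here is a minimal standalone sketch, not part of this commit, that evaluates one 1-D JacobianFactor at zero; it uses an explicit zero-filled VectorValues rather than the empty one the TODO above questions:

// Minimal sketch, not from this commit: one leaf of the error ->
// unnormalized-probability conversion used by the factorError lambda.
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/linear/VectorValues.h>
#include <cmath>
#include <iostream>

int main() {
  using namespace gtsam;
  // 1-D factor with unit noise: error(x) = 0.5 * (x - 2)^2.
  JacobianFactor factor(Symbol('x', 0), Matrix11::Identity(), Vector1(2.0),
                        noiseModel::Isotropic::Sigma(1, 1.0));
  // Evaluate at x = 0 (a zero-filled stand-in for empty_values above).
  VectorValues zero;
  zero.insert(Symbol('x', 0), Vector1::Zero());
  double error = factor.error(zero);           // 0.5 * 2^2 = 2.0
  std::cout << std::exp(-error) << std::endl;  // exp(-2) ~= 0.135
  return 0;
}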
@@ -0,0 +1,226 @@
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file testHybridEstimation.cpp
 * @brief Unit tests for Hybrid Estimation
 * @author Varun Agrawal
 */

#include <gtsam/base/serializationTestHelpers.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>

// Include for test suite
#include <CppUnitLite/TestHarness.h>

using namespace std;
using namespace gtsam;

using noiseModel::Isotropic;
using symbol_shorthand::M;
using symbol_shorthand::X;
class Robot {
  size_t K_;
  DiscreteKeys modes_;
  HybridNonlinearFactorGraph nonlinearFactorGraph_;
  HybridGaussianFactorGraph linearizedFactorGraph_;
  Values linearizationPoint_;

 public:
  Robot(size_t K, std::vector<double> measurements) : K_(K) {
    // Create DiscreteKeys for the K binary modes.
    for (size_t k = 0; k < K; k++) {
      modes_.emplace_back(M(k), 2);
    }

    // Create the hybrid factor graph.
    // Add measurement factors.
    auto measurement_noise = noiseModel::Isotropic::Sigma(1, 1.0);
    for (size_t k = 0; k < K; k++) {
      nonlinearFactorGraph_.emplace_nonlinear<PriorFactor<double>>(
          X(k), measurements.at(k), measurement_noise);
    }

    // Two noise models, where moving has a higher covariance.
    auto still_noise_model = noiseModel::Isotropic::Sigma(1, 1e-2);
    auto moving_noise_model = noiseModel::Isotropic::Sigma(1, 1e2);

    // Add "motion models". The idea is that the robot has more freedom
    // (i.e. higher covariance) when it is moving.
    using MotionModel = BetweenFactor<double>;

    for (size_t k = 1; k < K; k++) {
      KeyVector keys = {X(k - 1), X(k)};
      DiscreteKeys dkeys{modes_[k - 1]};
      auto still = boost::make_shared<MotionModel>(X(k - 1), X(k), 0.0,
                                                   still_noise_model),
           moving = boost::make_shared<MotionModel>(X(k - 1), X(k), 0.0,
                                                    moving_noise_model);
      std::vector<boost::shared_ptr<MotionModel>> components = {still, moving};
      nonlinearFactorGraph_.emplace_hybrid<MixtureFactor>(keys, dkeys,
                                                          components);
    }

    // Create the linearization point.
    for (size_t k = 0; k < K; k++) {
      linearizationPoint_.insert<double>(X(k), static_cast<double>(k + 1));
    }

    linearizedFactorGraph_ =
        nonlinearFactorGraph_.linearize(linearizationPoint_);
  }

  void print() const {
    nonlinearFactorGraph_.print();
    linearizationPoint_.print();
    linearizedFactorGraph_.print();
  }

  HybridValues optimize() const {
    Ordering hybridOrdering = linearizedFactorGraph_.getHybridOrdering();
    HybridBayesNet::shared_ptr hybridBayesNet =
        linearizedFactorGraph_.eliminateSequential(hybridOrdering);

    HybridValues delta = hybridBayesNet->optimize();
    return delta;
  }
};

/* ****************************************************************************/
/**
 * Test whether hybrid mixture components that differ only in covariance
 * behave sensibly. The "moving" covariance is set to 1e2 while the "still"
 * covariance is 1e-2.
 */
TEST(Estimation, StillRobot) {
  size_t K = 2;
  vector<double> measurements = {0, 0};

  Robot robot(K, measurements);
  // robot.print();

  HybridValues delta = robot.optimize();

  delta.continuous().print("delta update:");

  if (delta.discrete()[M(0)] == 0) {
    std::cout << "The robot is stationary!" << std::endl;
  } else {
    std::cout << "The robot has moved!" << std::endl;
  }
  EXPECT_LONGS_EQUAL(0, delta.discrete()[M(0)]);
}

TEST(Estimation, MovingRobot) {
  size_t K = 2;
  vector<double> measurements = {0, 2};

  Robot robot(K, measurements);
  // robot.print();

  HybridValues delta = robot.optimize();

  delta.continuous().print("delta update:");

  if (delta.discrete()[M(0)] == 0) {
    std::cout << "The robot is stationary!" << std::endl;
  } else {
    std::cout << "The robot has moved!" << std::endl;
  }
  EXPECT_LONGS_EQUAL(1, delta.discrete()[M(0)]);
}

/// A robot with a single leg.
class SingleLeg {
  size_t K_;
  DiscreteKeys modes_;
  HybridNonlinearFactorGraph nonlinearFactorGraph_;
  HybridGaussianFactorGraph linearizedFactorGraph_;
  Values linearizationPoint_;

 public:
  SingleLeg(size_t K, std::vector<double> measurements) : K_(K) {
    // Create DiscreteKeys for the K binary modes.
    for (size_t k = 0; k < K; k++) {
      modes_.emplace_back(M(k), 2);
    }

    // Create the hybrid factor graph.
    // Add measurement factors.
    auto measurement_noise = noiseModel::Isotropic::Sigma(1, 1.0);
    for (size_t k = 0; k < K; k++) {
      nonlinearFactorGraph_.emplace_nonlinear<PriorFactor<double>>(
          X(k), measurements.at(k), measurement_noise);
    }

    // Two noise models, where moving has a higher covariance.
    auto still_noise_model = noiseModel::Isotropic::Sigma(1, 1e-2);
    auto moving_noise_model = noiseModel::Isotropic::Sigma(1, 1e2);

    // Add "motion models". The idea is that the robot has more freedom
    // (i.e. higher covariance) when it is moving.
    using MotionModel = BetweenFactor<double>;

    for (size_t k = 1; k < K; k++) {
      KeyVector keys = {X(k - 1), X(k)};
      DiscreteKeys dkeys{modes_[k - 1]};
      auto still = boost::make_shared<MotionModel>(X(k - 1), X(k), 0.0,
                                                   still_noise_model),
           moving = boost::make_shared<MotionModel>(X(k - 1), X(k), 0.0,
                                                    moving_noise_model);
      std::vector<boost::shared_ptr<MotionModel>> components = {still, moving};
      nonlinearFactorGraph_.emplace_hybrid<MixtureFactor>(keys, dkeys,
                                                          components);
    }

    // Create the linearization point.
    for (size_t k = 0; k < K; k++) {
      linearizationPoint_.insert<double>(X(k), static_cast<double>(k + 1));
    }

    linearizedFactorGraph_ =
        nonlinearFactorGraph_.linearize(linearizationPoint_);
  }

  void print() const {
    nonlinearFactorGraph_.print();
    linearizationPoint_.print();
    linearizedFactorGraph_.print();
  }

  HybridValues optimize() const {
    Ordering hybridOrdering = linearizedFactorGraph_.getHybridOrdering();
    HybridBayesNet::shared_ptr hybridBayesNet =
        linearizedFactorGraph_.eliminateSequential(hybridOrdering);

    HybridValues delta = hybridBayesNet->optimize();
    return delta;
  }
};

/* ************************************************************************* */
int main() {
  TestResult tr;
  return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */
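Why the two tests above expect mode 0 when the measurements agree and mode 1 otherwise: both motion-model components are zero-mean, so mode selection is driven by the Gaussian densities N(d; 0, sigma^2) of the relative motion d between steps. A minimal standalone sketch of that intuition (not part of the test file):

// Minimal sketch, not from this commit: the tight "still" model (sigma=1e-2)
// has the higher density when the measurements agree (d = 0), while the loose
// "moving" model (sigma=1e2) wins at d = 2, where the still model's error
// 0.5 * (d / sigma)^2 is enormous.
#include <cmath>
#include <iostream>

double gaussianDensity(double d, double sigma) {
  return std::exp(-0.5 * (d / sigma) * (d / sigma)) /
         (std::sqrt(2.0 * M_PI) * sigma);
}

int main() {
  std::cout << "d=0: still " << gaussianDensity(0, 1e-2) << " vs moving "
            << gaussianDensity(0, 1e2) << "\n";  // still wins: ~39.9 vs ~0.004
  std::cout << "d=2: still " << gaussianDensity(2, 1e-2) << " vs moving "
            << gaussianDensity(2, 1e2) << "\n";  // moving wins: ~0 vs ~0.004
  return 0;
}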