Merge pull request #1853 from borglab/feature/bougie_constructors

More refactoring
release/4.3a0
Frank Dellaert 2024-09-29 11:43:42 -07:00 committed by GitHub
commit ffb282931c
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
15 changed files with 316 additions and 275 deletions

View File

@ -44,9 +44,14 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
/// @name Standard Constructors
/// @{
/** Construct empty Bayes net */
/// Construct empty Bayes net.
HybridBayesNet() = default;
/// Constructor that takes an initializer list of shared pointers.
HybridBayesNet(
std::initializer_list<HybridConditional::shared_ptr> conditionals)
: Base(conditionals) {}
/// @}
/// @name Testable
/// @{

View File

@ -26,21 +26,49 @@
#include <gtsam/inference/Conditional-inst.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>
#include <cstddef>
namespace gtsam {
/* *******************************************************************************/
struct HybridGaussianConditional::ConstructorHelper {
struct HybridGaussianConditional::Helper {
std::optional<size_t> nrFrontals;
HybridGaussianFactor::FactorValuePairs pairs;
FactorValuePairs pairs;
Conditionals conditionals;
double minNegLogConstant;
/// Compute all variables needed for the private constructor below.
ConstructorHelper(const Conditionals &conditionals)
: minNegLogConstant(std::numeric_limits<double>::infinity()) {
auto func = [this](const GaussianConditional::shared_ptr &c)
-> GaussianFactorValuePair {
using GC = GaussianConditional;
using P = std::vector<std::pair<Vector, double>>;
/// Construct from a vector of mean and sigma pairs, plus extra args.
template <typename... Args>
explicit Helper(const DiscreteKey &mode, const P &p, Args &&...args) {
nrFrontals = 1;
minNegLogConstant = std::numeric_limits<double>::infinity();
std::vector<GaussianFactorValuePair> fvs;
std::vector<GC::shared_ptr> gcs;
fvs.reserve(p.size());
gcs.reserve(p.size());
for (auto &&[mean, sigma] : p) {
auto gaussianConditional =
GC::sharedMeanAndStddev(std::forward<Args>(args)..., mean, sigma);
double value = gaussianConditional->negLogConstant();
minNegLogConstant = std::min(minNegLogConstant, value);
fvs.emplace_back(gaussianConditional, value);
gcs.push_back(gaussianConditional);
}
conditionals = Conditionals({mode}, gcs);
pairs = FactorValuePairs({mode}, fvs);
}
/// Construct from tree of GaussianConditionals.
explicit Helper(const Conditionals &conditionals)
: conditionals(conditionals),
minNegLogConstant(std::numeric_limits<double>::infinity()) {
auto func = [this](const GC::shared_ptr &c) -> GaussianFactorValuePair {
double value = 0.0;
if (c) {
if (!nrFrontals.has_value()) {
@ -51,38 +79,56 @@ struct HybridGaussianConditional::ConstructorHelper {
}
return {std::dynamic_pointer_cast<GaussianFactor>(c), value};
};
pairs = HybridGaussianFactor::FactorValuePairs(conditionals, func);
pairs = FactorValuePairs(conditionals, func);
if (!nrFrontals.has_value()) {
throw std::runtime_error(
"HybridGaussianConditional: need at least one frontal variable.");
"HybridGaussianConditional: need at least one frontal variable. "
"Provided conditionals do not contain any frontal variables.");
}
}
};
/* *******************************************************************************/
HybridGaussianConditional::HybridGaussianConditional(
const DiscreteKeys &discreteParents,
const HybridGaussianConditional::Conditionals &conditionals,
const ConstructorHelper &helper)
const DiscreteKeys &discreteParents, const Helper &helper)
: BaseFactor(discreteParents, helper.pairs),
BaseConditional(*helper.nrFrontals),
conditionals_(conditionals),
conditionals_(helper.conditionals),
negLogConstant_(helper.minNegLogConstant) {}
/* *******************************************************************************/
HybridGaussianConditional::HybridGaussianConditional(
const DiscreteKeys &discreteParents,
const HybridGaussianConditional::Conditionals &conditionals)
: HybridGaussianConditional(discreteParents, conditionals,
ConstructorHelper(conditionals)) {}
/* *******************************************************************************/
HybridGaussianConditional::HybridGaussianConditional(
const DiscreteKey &discreteParent,
const std::vector<GaussianConditional::shared_ptr> &conditionals)
: HybridGaussianConditional(DiscreteKeys{discreteParent},
Conditionals({discreteParent}, conditionals)) {}
HybridGaussianConditional::HybridGaussianConditional(
const DiscreteKey &discreteParent, Key key, //
const std::vector<std::pair<Vector, double>> &parameters)
: HybridGaussianConditional(DiscreteKeys{discreteParent},
Helper(discreteParent, parameters, key)) {}
HybridGaussianConditional::HybridGaussianConditional(
const DiscreteKey &discreteParent, Key key, //
const Matrix &A, Key parent,
const std::vector<std::pair<Vector, double>> &parameters)
: HybridGaussianConditional(
DiscreteKeys{discreteParent},
Helper(discreteParent, parameters, key, A, parent)) {}
HybridGaussianConditional::HybridGaussianConditional(
const DiscreteKey &discreteParent, Key key, //
const Matrix &A1, Key parent1, const Matrix &A2, Key parent2,
const std::vector<std::pair<Vector, double>> &parameters)
: HybridGaussianConditional(
DiscreteKeys{discreteParent},
Helper(discreteParent, parameters, key, A1, parent1, A2, parent2)) {}
HybridGaussianConditional::HybridGaussianConditional(
const DiscreteKeys &discreteParents,
const HybridGaussianConditional::Conditionals &conditionals)
: HybridGaussianConditional(discreteParents, Helper(conditionals)) {}
/* *******************************************************************************/
const HybridGaussianConditional::Conditionals &
HybridGaussianConditional::conditionals() const {

View File

@ -87,6 +87,49 @@ class GTSAM_EXPORT HybridGaussianConditional
const DiscreteKey &discreteParent,
const std::vector<GaussianConditional::shared_ptr> &conditionals);
/**
* @brief Constructs a HybridGaussianConditional with means mu_i and
* standard deviations sigma_i.
*
* @param discreteParent The discrete parent or "mode" key.
* @param key The key for this conditional variable.
* @param parameters A vector of pairs (mu_i, sigma_i).
*/
HybridGaussianConditional(
const DiscreteKey &discreteParent, Key key,
const std::vector<std::pair<Vector, double>> &parameters);
/**
* @brief Constructs a HybridGaussianConditional with conditional means
* A × parent + b_i and standard deviations sigma_i.
*
* @param discreteParent The discrete parent or "mode" key.
* @param key The key for this conditional variable.
* @param A The matrix A.
* @param parent The key of the parent variable.
* @param parameters A vector of pairs (b_i, sigma_i).
*/
HybridGaussianConditional(
const DiscreteKey &discreteParent, Key key, const Matrix &A, Key parent,
const std::vector<std::pair<Vector, double>> &parameters);
/**
* @brief Constructs a HybridGaussianConditional with conditional means
* A1 × parent1 + A2 × parent2 + b_i and standard deviations sigma_i.
*
* @param discreteParent The discrete parent or "mode" key.
* @param key The key for this conditional variable.
* @param A1 The first matrix.
* @param parent1 The key of the first parent variable.
* @param A2 The second matrix.
* @param parent2 The key of the second parent variable.
* @param parameters A vector of pairs (b_i, sigma_i).
*/
HybridGaussianConditional(
const DiscreteKey &discreteParent, Key key, //
const Matrix &A1, Key parent1, const Matrix &A2, Key parent2,
const std::vector<std::pair<Vector, double>> &parameters);
/**
* @brief Construct from multiple discrete keys and conditional tree.
*
@ -183,13 +226,11 @@ class GTSAM_EXPORT HybridGaussianConditional
private:
/// Helper struct for private constructor.
struct ConstructorHelper;
struct Helper;
/// Private constructor that uses helper struct above.
HybridGaussianConditional(
const DiscreteKeys &discreteParents,
const HybridGaussianConditional::Conditionals &conditionals,
const ConstructorHelper &helper);
HybridGaussianConditional(const DiscreteKeys &discreteParents,
const Helper &helper);
/// Convert to a DecisionTree of Gaussian factor graphs.
GaussianFactorGraphTree asGaussianFactorGraphTree() const;

View File

@ -366,18 +366,18 @@ static std::shared_ptr<Factor> createHybridGaussianFactor(
return std::make_shared<HybridGaussianFactor>(discreteSeparator, newFactors);
}
static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>>
hybridElimination(const HybridGaussianFactorGraph &factors,
const Ordering &frontalKeys,
const std::set<DiscreteKey> &discreteSeparatorSet) {
// NOTE: since we use the special JunctionTree,
// only possibility is continuous conditioned on discrete.
DiscreteKeys discreteSeparator(discreteSeparatorSet.begin(),
discreteSeparatorSet.end());
/* *******************************************************************************/
std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>>
HybridGaussianFactorGraph::eliminate(const Ordering &keys) const {
// Since we eliminate all continuous variables first,
// the discrete separator will be *all* the discrete keys.
const std::set<DiscreteKey> keysForDiscreteVariables = discreteKeys();
DiscreteKeys discreteSeparator(keysForDiscreteVariables.begin(),
keysForDiscreteVariables.end());
// Collect all the factors to create a set of Gaussian factor graphs in a
// decision tree indexed by all discrete keys involved.
GaussianFactorGraphTree factorGraphTree = factors.assembleGraphTree();
GaussianFactorGraphTree factorGraphTree = assembleGraphTree();
// Convert factor graphs with a nullptr to an empty factor graph.
// This is done after assembly since it is non-trivial to keep track of which
@ -392,7 +392,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
}
// Expensive elimination of product factor.
auto result = EliminatePreferCholesky(graph, frontalKeys);
auto result = EliminatePreferCholesky(graph, keys);
// Record whether there any continuous variables left
someContinuousLeft |= !result.second->empty();
@ -436,7 +436,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
*/
std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>> //
EliminateHybrid(const HybridGaussianFactorGraph &factors,
const Ordering &frontalKeys) {
const Ordering &keys) {
// NOTE: Because we are in the Conditional Gaussian regime there are only
// a few cases:
// 1. continuous variable, make a hybrid Gaussian conditional if there are
@ -510,20 +510,13 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors,
if (only_discrete) {
// Case 1: we are only dealing with discrete
return discreteElimination(factors, frontalKeys);
return discreteElimination(factors, keys);
} else if (only_continuous) {
// Case 2: we are only dealing with continuous
return continuousElimination(factors, frontalKeys);
return continuousElimination(factors, keys);
} else {
// Case 3: We are now in the hybrid land!
KeySet frontalKeysSet(frontalKeys.begin(), frontalKeys.end());
// Find all discrete keys.
// Since we eliminate all continuous variables first,
// the discrete separator will be *all* the discrete keys.
std::set<DiscreteKey> discreteSeparator = factors.discreteKeys();
return hybridElimination(factors, frontalKeys, discreteSeparator);
return factors.eliminate(keys);
}
}

View File

@ -217,6 +217,14 @@ class GTSAM_EXPORT HybridGaussianFactorGraph
*/
GaussianFactorGraphTree assembleGraphTree() const;
/**
* @brief Eliminate the given continuous keys.
*
* @param keys The continuous keys to eliminate.
* @return The conditional on the keys and a factor on the separator.
*/
std::pair<std::shared_ptr<HybridConditional>, std::shared_ptr<Factor>>
eliminate(const Ordering& keys) const;
/// @}
/// Get the GaussianFactorGraph at a given discrete assignment.

View File

@ -24,17 +24,16 @@
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianFactor.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/nonlinear/NonlinearFactor.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>
#include <vector>
#include "gtsam/linear/GaussianFactor.h"
#include "gtsam/linear/GaussianFactorGraph.h"
#include "gtsam/nonlinear/NonlinearFactor.h"
#pragma once
namespace gtsam {

View File

@ -41,12 +41,12 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
HybridBayesNet bayesNet;
// Create hybrid Gaussian factor z_i = x0 + noise for each measurement.
std::vector<std::pair<Vector, double>> measurementModels{{Z_1x1, 0.5},
{Z_1x1, 3.0}};
for (size_t i = 0; i < num_measurements; i++) {
const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode;
std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 0.5),
GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 3)};
bayesNet.emplace_shared<HybridGaussianConditional>(mode_i, conditionals);
bayesNet.emplace_shared<HybridGaussianConditional>(mode_i, Z(i), I_1x1,
X(0), measurementModels);
}
// Create prior on X(0).

View File

@ -43,26 +43,6 @@ const DiscreteValues m1Assignment{{M(0), 1}};
DiscreteConditional::shared_ptr mixing =
std::make_shared<DiscreteConditional>(m, "60/40");
/**
* Create a simple Gaussian Mixture Model represented as p(z|m)P(m)
* where m is a discrete variable and z is a continuous variable.
* The "mode" m is binary and depending on m, we have 2 different means
* μ1 and μ2 for the Gaussian density p(z|m).
*/
HybridBayesNet GaussianMixtureModel(double mu0, double mu1, double sigma0,
double sigma1) {
HybridBayesNet hbn;
auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
auto c0 = std::make_shared<GaussianConditional>(Z(0), Vector1(mu0), I_1x1,
model0),
c1 = std::make_shared<GaussianConditional>(Z(0), Vector1(mu1), I_1x1,
model1);
hbn.emplace_shared<HybridGaussianConditional>(m, std::vector{c0, c1});
hbn.push_back(mixing);
return hbn;
}
/// Gaussian density function
double Gaussian(double mu, double sigma, double z) {
return exp(-0.5 * pow((z - mu) / sigma, 2)) / sqrt(2 * M_PI * sigma * sigma);
@ -99,11 +79,16 @@ TEST(GaussianMixture, GaussianMixtureModel) {
double mu0 = 1.0, mu1 = 3.0;
double sigma = 2.0;
auto hbn = GaussianMixtureModel(mu0, mu1, sigma, sigma);
// Create a Gaussian mixture model p(z|m) with same sigma.
HybridBayesNet gmm;
std::vector<std::pair<Vector, double>> parameters{{Vector1(mu0), sigma},
{Vector1(mu1), sigma}};
gmm.emplace_shared<HybridGaussianConditional>(m, Z(0), parameters);
gmm.push_back(mixing);
// At the halfway point between the means, we should get P(m|z)=0.5
double midway = mu1 - mu0;
auto pMid = SolveHBN(hbn, midway);
auto pMid = SolveHBN(gmm, midway);
EXPECT(assert_equal(DiscreteConditional(m, "60/40"), pMid));
// Everywhere else, the result should be a sigmoid.
@ -112,7 +97,7 @@ TEST(GaussianMixture, GaussianMixtureModel) {
const double expected = prob_m_z(mu0, mu1, sigma, sigma, z);
// Workflow 1: convert HBN to HFG and solve
auto posterior1 = SolveHBN(hbn, z);
auto posterior1 = SolveHBN(gmm, z);
EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8);
// Workflow 2: directly specify HFG and solve
@ -133,12 +118,17 @@ TEST(GaussianMixture, GaussianMixtureModel2) {
double mu0 = 1.0, mu1 = 3.0;
double sigma0 = 8.0, sigma1 = 4.0;
auto hbn = GaussianMixtureModel(mu0, mu1, sigma0, sigma1);
// Create a Gaussian mixture model p(z|m) with different sigmas.
HybridBayesNet gmm;
std::vector<std::pair<Vector, double>> parameters{{Vector1(mu0), sigma0},
{Vector1(mu1), sigma1}};
gmm.emplace_shared<HybridGaussianConditional>(m, Z(0), parameters);
gmm.push_back(mixing);
// We get zMax=3.1333 by finding the maximum value of the function, at which
// point the mode m==1 is about twice as probable as m==0.
double zMax = 3.133;
auto pMax = SolveHBN(hbn, zMax);
auto pMax = SolveHBN(gmm, zMax);
EXPECT(assert_equal(DiscreteConditional(m, "42/58"), pMax, 1e-4));
// Everywhere else, the result should be a bell curve like function.
@ -147,7 +137,7 @@ TEST(GaussianMixture, GaussianMixtureModel2) {
const double expected = prob_m_z(mu0, mu1, sigma0, sigma1, z);
// Workflow 1: convert HBN to HFG and solve
auto posterior1 = SolveHBN(hbn, z);
auto posterior1 = SolveHBN(gmm, z);
EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8);
// Workflow 2: directly specify HFG and solve

View File

@ -20,11 +20,11 @@
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridConditional.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include "Switching.h"
#include "TinyHybridExample.h"
#include "gtsam/nonlinear/NonlinearFactor.h"
// Include for test suite
#include <CppUnitLite/TestHarness.h>
@ -32,7 +32,6 @@
using namespace std;
using namespace gtsam;
using noiseModel::Isotropic;
using symbol_shorthand::M;
using symbol_shorthand::X;
using symbol_shorthand::Z;
@ -91,40 +90,52 @@ TEST(HybridBayesNet, Tiny) {
EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8);
}
/* ****************************************************************************/
// Hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).
namespace different_sigmas {
const auto gc = GaussianConditional::sharedMeanAndStddev(X(0), 2 * I_1x1, X(1),
Vector1(-4.0), 5.0);
const std::vector<std::pair<Vector, double>> parms{{Vector1(5), 2.0},
{Vector1(2), 3.0}};
const auto hgc = std::make_shared<HybridGaussianConditional>(Asia, X(1), parms);
const auto prior = std::make_shared<DiscreteConditional>(Asia, "99/1");
auto wrap = [](const auto& c) {
return std::make_shared<HybridConditional>(c);
};
const HybridBayesNet bayesNet{wrap(gc), wrap(hgc), wrap(prior)};
// Create values at which to evaluate.
HybridValues values{{{X(0), Vector1(-6)}, {X(1), Vector1(1)}}, {{asiaKey, 0}}};
} // namespace different_sigmas
/* ****************************************************************************/
// Test evaluate for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).
TEST(HybridBayesNet, evaluateHybrid) {
const auto continuousConditional = GaussianConditional::sharedMeanAndStddev(
X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0);
using namespace different_sigmas;
const SharedDiagonal model0 = noiseModel::Diagonal::Sigmas(Vector1(2.0)),
model1 = noiseModel::Diagonal::Sigmas(Vector1(3.0));
const auto conditional0 = std::make_shared<GaussianConditional>(
X(1), Vector1::Constant(5), I_1x1, model0),
conditional1 = std::make_shared<GaussianConditional>(
X(1), Vector1::Constant(2), I_1x1, model1);
// Create hybrid Bayes net.
HybridBayesNet bayesNet;
bayesNet.push_back(continuousConditional);
bayesNet.emplace_shared<HybridGaussianConditional>(
Asia, std::vector{conditional0, conditional1});
bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
// Create values at which to evaluate.
HybridValues values;
values.insert(asiaKey, 0);
values.insert(X(0), Vector1(-6));
values.insert(X(1), Vector1(1));
const double conditionalProbability =
continuousConditional->evaluate(values.continuous());
const double mixtureProbability = conditional0->evaluate(values.continuous());
const double conditionalProbability = gc->evaluate(values.continuous());
const double mixtureProbability = hgc->evaluate(values);
EXPECT_DOUBLES_EQUAL(conditionalProbability * mixtureProbability * 0.99,
bayesNet.evaluate(values), 1e-9);
}
/* ****************************************************************************/
// Test error for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).
TEST(HybridBayesNet, Error) {
using namespace different_sigmas;
AlgebraicDecisionTree<Key> actual = bayesNet.errorTree(values.continuous());
// Regression.
// Manually added all the error values from the 3 conditional types.
AlgebraicDecisionTree<Key> expected(
{Asia}, std::vector<double>{2.33005033585, 5.38619084965});
EXPECT(assert_equal(expected, actual));
}
/* ****************************************************************************/
// Test choosing an assignment of conditionals
TEST(HybridBayesNet, Choose) {
@ -154,45 +165,6 @@ TEST(HybridBayesNet, Choose) {
*gbn.at(3)));
}
/* ****************************************************************************/
// Test error for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).
TEST(HybridBayesNet, Error) {
const auto continuousConditional = GaussianConditional::sharedMeanAndStddev(
X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0);
const SharedDiagonal model0 = noiseModel::Diagonal::Sigmas(Vector1(2.0)),
model1 = noiseModel::Diagonal::Sigmas(Vector1(3.0));
const auto conditional0 = std::make_shared<GaussianConditional>(
X(1), Vector1::Constant(5), I_1x1, model0),
conditional1 = std::make_shared<GaussianConditional>(
X(1), Vector1::Constant(2), I_1x1, model1);
auto gm = std::make_shared<HybridGaussianConditional>(
Asia, std::vector{conditional0, conditional1});
// Create hybrid Bayes net.
HybridBayesNet bayesNet;
bayesNet.push_back(continuousConditional);
bayesNet.push_back(gm);
bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
// Create values at which to evaluate.
HybridValues values;
values.insert(asiaKey, 0);
values.insert(X(0), Vector1(-6));
values.insert(X(1), Vector1(1));
AlgebraicDecisionTree<Key> actual_errors =
bayesNet.errorTree(values.continuous());
// Regression.
// Manually added all the error values from the 3 conditional types.
AlgebraicDecisionTree<Key> expected_errors(
{Asia}, std::vector<double>{2.33005033585, 5.38619084965});
EXPECT(assert_equal(expected_errors, actual_errors));
}
/* ****************************************************************************/
// Test Bayes net optimize
TEST(HybridBayesNet, OptimizeAssignment) {
@ -444,6 +416,57 @@ TEST(HybridBayesNet, Sampling) {
// num_samples)));
}
/* ****************************************************************************/
// Test hybrid gaussian factor graph errorTree when
// there is a HybridConditional in the graph
TEST(HybridBayesNet, ErrorTreeWithConditional) {
using symbol_shorthand::F;
Key z0 = Z(0), f01 = F(0);
Key x0 = X(0), x1 = X(1);
HybridBayesNet hbn;
auto prior_model = noiseModel::Isotropic::Sigma(1, 1e-1);
auto measurement_model = noiseModel::Isotropic::Sigma(1, 2.0);
// Set a prior P(x0) at x0=0
hbn.emplace_shared<GaussianConditional>(x0, Vector1(0.0), I_1x1, prior_model);
// Add measurement P(z0 | x0)
hbn.emplace_shared<GaussianConditional>(z0, Vector1(0.0), -I_1x1, x0, I_1x1,
measurement_model);
// Add hybrid motion model
double mu = 0.0;
double sigma0 = 1e2, sigma1 = 1e-2;
auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
auto c0 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
x0, -I_1x1, model0),
c1 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
x0, -I_1x1, model1);
DiscreteKey m1(M(2), 2);
hbn.emplace_shared<HybridGaussianConditional>(m1, std::vector{c0, c1});
// Discrete uniform prior.
hbn.emplace_shared<DiscreteConditional>(m1, "0.5/0.5");
VectorValues given;
given.insert(z0, Vector1(0.0));
given.insert(f01, Vector1(0.0));
auto gfg = hbn.toFactorGraph(given);
VectorValues vv;
vv.insert(x0, Vector1(1.0));
vv.insert(x1, Vector1(2.0));
AlgebraicDecisionTree<Key> errorTree = gfg.errorTree(vv);
// regression
AlgebraicDecisionTree<Key> expected(m1, 59.335390372, 5050.125);
EXPECT(assert_equal(expected, errorTree, 1e-9));
}
/* ************************************************************************* */
int main() {
TestResult tr;

View File

@ -19,6 +19,7 @@
#include <gtsam/geometry/Pose2.h>
#include <gtsam/geometry/Pose3.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearISAM.h>
@ -39,7 +40,6 @@
#include <bitset>
#include "Switching.h"
#include "gtsam/nonlinear/NonlinearFactor.h"
using namespace std;
using namespace gtsam;
@ -572,12 +572,10 @@ TEST(HybridEstimation, ModeSelection) {
bn.push_back(
GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));
std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
Z_1x1, noise_loose),
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
Z_1x1, noise_tight)};
bn.emplace_shared<HybridGaussianConditional>(mode, conditionals);
std::vector<std::pair<Vector, double>> parameters{{Z_1x1, noise_loose},
{Z_1x1, noise_tight}};
bn.emplace_shared<HybridGaussianConditional>(mode, Z(0), I_1x1, X(0), -I_1x1,
X(1), parameters);
VectorValues vv;
vv.insert(Z(0), Z_1x1);
@ -604,12 +602,10 @@ TEST(HybridEstimation, ModeSelection2) {
bn.push_back(
GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1));
std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
Z_3x1, noise_loose),
GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
Z_3x1, noise_tight)};
bn.emplace_shared<HybridGaussianConditional>(mode, conditionals);
std::vector<std::pair<Vector, double>> parameters{{Z_3x1, noise_loose},
{Z_3x1, noise_tight}};
bn.emplace_shared<HybridGaussianConditional>(mode, Z(0), I_3x3, X(0), -I_3x3,
X(1), parameters);
VectorValues vv;
vv.insert(Z(0), Z_3x1);

View File

@ -18,6 +18,7 @@
#include <CppUnitLite/Test.h>
#include <CppUnitLite/TestHarness.h>
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/base/Vector.h>
#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/discrete/DiscreteValues.h>
@ -42,6 +43,7 @@
#include <functional>
#include <iostream>
#include <iterator>
#include <memory>
#include <numeric>
#include <vector>
@ -61,6 +63,8 @@ using gtsam::symbol_shorthand::Z;
// Set up sampling
std::mt19937_64 kRng(42);
static const DiscreteKey m1(M(1), 2);
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, Creation) {
HybridConditional conditional;
@ -98,11 +102,9 @@ TEST(HybridGaussianFactorGraph, EliminateMultifrontal) {
// Test multifrontal elimination
HybridGaussianFactorGraph hfg;
DiscreteKey m(M(1), 2);
// Add priors on x0 and c1
hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
hfg.add(DecisionTreeFactor(m, {2, 8}));
hfg.add(DecisionTreeFactor(m1, {2, 8}));
Ordering ordering;
ordering.push_back(X(0));
@ -120,6 +122,25 @@ std::vector<GaussianFactor::shared_ptr> components(Key key) {
}
} // namespace two
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, hybridEliminationOneFactor) {
HybridGaussianFactorGraph hfg;
hfg.add(HybridGaussianFactor(m1, two::components(X(1))));
auto result = hfg.eliminate({X(1)});
// Check that we have a valid Gaussian conditional.
auto hgc = result.first->asHybrid();
CHECK(hgc);
const HybridValues values{{{X(1), Z_3x1}}, {{M(1), 1}}};
EXPECT(HybridConditional::CheckInvariants(*result.first, values));
// Check that factor is discrete and correct
auto factor = std::dynamic_pointer_cast<DecisionTreeFactor>(result.second);
CHECK(factor);
EXPECT(assert_equal(DecisionTreeFactor{m1, "1 1"}, *factor));
}
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
HybridGaussianFactorGraph hfg;
@ -131,7 +152,6 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
// Add a hybrid gaussian factor ϕ(x1, c1)
DiscreteKey m1(M(1), 2);
hfg.add(HybridGaussianFactor(m1, two::components(X(1))));
auto result = hfg.eliminateSequential();
@ -148,8 +168,6 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
HybridGaussianFactorGraph hfg;
DiscreteKey m1(M(1), 2);
// Add prior on x0
hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
// Add factor between x0 and x1
@ -172,8 +190,6 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
HybridGaussianFactorGraph hfg;
DiscreteKey m1(M(1), 2);
hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
@ -196,17 +212,15 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) {
HybridGaussianFactorGraph hfg;
DiscreteKey m(M(1), 2);
// Prior on x0
hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
// Factor between x0-x1
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
// Hybrid factor P(x1|c1)
hfg.add(HybridGaussianFactor(m, two::components(X(1))));
hfg.add(HybridGaussianFactor(m1, two::components(X(1))));
// Prior factor on c1
hfg.add(DecisionTreeFactor(m, {2, 8}));
hfg.add(DecisionTreeFactor(m1, {2, 8}));
// Get a constrained ordering keeping c1 last
auto ordering_full = HybridOrdering(hfg);
@ -228,20 +242,16 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1));
{
hfg.add(HybridGaussianFactor({M(0), 2}, two::components(X(0))));
hfg.add(HybridGaussianFactor({M(1), 2}, two::components(X(2))));
}
hfg.add(HybridGaussianFactor({M(0), 2}, two::components(X(0))));
hfg.add(HybridGaussianFactor({M(1), 2}, two::components(X(2))));
hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
hfg.add(JacobianFactor(X(3), I_3x3, X(4), -I_3x3, Z_3x1));
hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1));
{
hfg.add(HybridGaussianFactor({M(3), 2}, two::components(X(3))));
hfg.add(HybridGaussianFactor({M(2), 2}, two::components(X(5))));
}
hfg.add(HybridGaussianFactor({M(3), 2}, two::components(X(3))));
hfg.add(HybridGaussianFactor({M(2), 2}, two::components(X(5))));
auto ordering_full =
Ordering::ColamdConstrainedLast(hfg, {M(0), M(1), M(2), M(3)});
@ -521,17 +531,15 @@ TEST(HybridGaussianFactorGraph, DiscreteSelection) {
TEST(HybridGaussianFactorGraph, optimize) {
HybridGaussianFactorGraph hfg;
DiscreteKey c1(C(1), 2);
hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
hfg.add(HybridGaussianFactor(c1, two::components(X(1))));
hfg.add(HybridGaussianFactor(m1, two::components(X(1))));
auto result = hfg.eliminateSequential();
HybridValues hv = result->optimize();
EXPECT(assert_equal(hv.atDiscrete(C(1)), int(0)));
EXPECT(assert_equal(hv.atDiscrete(M(1)), int(0)));
}
/* ************************************************************************* */
@ -620,57 +628,6 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) {
EXPECT(assert_equal(expected_probabilities, probabilities, 1e-7));
}
/* ****************************************************************************/
// Test hybrid gaussian factor graph errorTree when
// there is a HybridConditional in the graph
TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
using symbol_shorthand::F;

// Keys: one binary mode, a measurement z0, a pseudo-measurement f01,
// and two continuous states x0, x1.
DiscreteKey mode(M(1), 2);
Key keyZ0 = Z(0), keyF01 = F(0);
Key keyX0 = X(0), keyX1 = X(1);

// Noise models: tight prior, loose measurement, and the two motion
// hypotheses (high-noise vs. low-noise).
auto priorNoise = noiseModel::Isotropic::Sigma(1, 1e-1);
auto measurementNoise = noiseModel::Isotropic::Sigma(1, 2.0);
double mean = 0.0;
double sigmaLoose = 1e2, sigmaTight = 1e-2;
auto looseModel = noiseModel::Isotropic::Sigma(1, sigmaLoose);
auto tightModel = noiseModel::Isotropic::Sigma(1, sigmaTight);

HybridBayesNet hbn;

// P(x0): prior centered at zero.
hbn.emplace_shared<GaussianConditional>(keyX0, Vector1(0.0), I_1x1,
priorNoise);

// P(z0 | x0): measurement model.
hbn.emplace_shared<GaussianConditional>(keyZ0, Vector1(0.0), -I_1x1, keyX0,
I_1x1, measurementNoise);

// P(f01 | x1, x0, mode): hybrid motion model with identical means but
// different noise in each branch.
auto branch0 = make_shared<GaussianConditional>(keyF01, Vector1(mean), I_1x1,
keyX1, I_1x1, keyX0, -I_1x1,
looseModel);
auto branch1 = make_shared<GaussianConditional>(keyF01, Vector1(mean), I_1x1,
keyX1, I_1x1, keyX0, -I_1x1,
tightModel);
hbn.emplace_shared<HybridGaussianConditional>(mode,
std::vector{branch0, branch1});

// P(mode): uniform discrete prior.
hbn.emplace_shared<DiscreteConditional>(mode, "0.5/0.5");

// Condition on z0 = 0 and f01 = 0, yielding a hybrid factor graph that
// contains a HybridConditional.
VectorValues measurements;
measurements.insert(keyZ0, Vector1(0.0));
measurements.insert(keyF01, Vector1(0.0));
auto gfg = hbn.toFactorGraph(measurements);

// Evaluate the per-mode error tree at the point x0 = 1, x1 = 2.
VectorValues point;
point.insert(keyX0, Vector1(1.0));
point.insert(keyX1, Vector1(2.0));
AlgebraicDecisionTree<Key> actual = gfg.errorTree(point);

// Regression values for the two mode assignments.
AlgebraicDecisionTree<Key> expected(mode, 59.335390372, 5050.125);
EXPECT(assert_equal(expected, actual, 1e-9));
}
/* ****************************************************************************/
// Test hybrid gaussian factor graph errorTree during
// incremental operation
@ -842,23 +799,19 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) {
TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
const VectorValues measurements{{Z(0), Vector1(5.0)}};
// Create mode key: 1 is low-noise, 0 is high-noise.
const DiscreteKey mode{M(0), 2};
HybridBayesNet bn;
// mode-dependent: 1 is low-noise, 0 is high-noise.
// Create hybrid Gaussian factor z_0 = x0 + noise for each measurement.
std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3),
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 0.5)};
auto gm = std::make_shared<HybridGaussianConditional>(mode, conditionals);
bn.push_back(gm);
std::vector<std::pair<Vector, double>> parms{{Z_1x1, 3}, {Z_1x1, 0.5}};
bn.emplace_shared<HybridGaussianConditional>(m1, Z(0), I_1x1, X(0), parms);
// Create prior on X(0).
bn.push_back(
GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5));
// Add prior on mode.
bn.emplace_shared<DiscreteConditional>(mode, "1/1");
// Add prior on m1.
bn.emplace_shared<DiscreteConditional>(m1, "1/1");
// bn.print();
auto fg = bn.toFactorGraph(measurements);
@ -878,10 +831,10 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
conditional1 = std::make_shared<GaussianConditional>(
X(0), Vector1(14.1421), I_1x1 * 2.82843);
expectedBayesNet.emplace_shared<HybridGaussianConditional>(
mode, std::vector{conditional0, conditional1});
m1, std::vector{conditional0, conditional1});
// Add prior on mode.
expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "1/1");
// Add prior on m1.
expectedBayesNet.emplace_shared<DiscreteConditional>(m1, "1/1");
// Test elimination
const auto posterior = fg.eliminateSequential();
@ -956,32 +909,27 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
// NOTE: we add reverse topological so we can sample from the Bayes net.:
// Add measurements:
std::vector<std::pair<Vector, double>> measurementModels{{Z_1x1, 3},
{Z_1x1, 0.5}};
for (size_t t : {0, 1, 2}) {
// Create hybrid Gaussian factor on Z(t) conditioned on X(t) and mode N(t):
const auto noise_mode_t = DiscreteKey{N(t), 2};
std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1, 0.5),
GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1,
3.0)};
bn.emplace_shared<HybridGaussianConditional>(noise_mode_t, conditionals);
bn.emplace_shared<HybridGaussianConditional>(noise_mode_t, Z(t), I_1x1,
X(t), measurementModels);
// Create prior on discrete mode N(t):
bn.emplace_shared<DiscreteConditional>(noise_mode_t, "20/80");
}
// Add motion models:
// Add motion models. TODO(frank): why are they exactly the same?
std::vector<std::pair<Vector, double>> motionModels{{Z_1x1, 0.2},
{Z_1x1, 0.2}};
for (size_t t : {2, 1}) {
// Create hybrid Gaussian factor on X(t) conditioned on X(t-1)
// and mode M(t-1):
const auto motion_model_t = DiscreteKey{M(t), 2};
std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), Z_1x1,
0.2),
GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), I_1x1,
0.2)};
auto gm = std::make_shared<HybridGaussianConditional>(motion_model_t,
conditionals);
bn.push_back(gm);
bn.emplace_shared<HybridGaussianConditional>(motion_model_t, X(t), I_1x1,
X(t - 1), motionModels);
// Create prior on motion model M(t):
bn.emplace_shared<DiscreteConditional>(motion_model_t, "40/60");

View File

@ -30,7 +30,6 @@
#include <numeric>
#include "Switching.h"
#include "gtsam/nonlinear/NonlinearFactor.h"
// Include for test suite
#include <CppUnitLite/TestHarness.h>

View File

@ -55,13 +55,10 @@ void addMeasurement(HybridBayesNet &hbn, Key z_key, Key x_key, double sigma) {
/// Create hybrid motion model p(x1 | x0, m1)
// NOTE(review): this span is diff residue with the +/- markers stripped — it
// shows BOTH the pre-refactor body (explicit noise models and two
// GaussianConditionals, up to the first return) AND the post-refactor body
// (mean/sigma pairs passed straight to the HybridGaussianConditional
// constructor). As printed, the second return is unreachable; only one
// version exists in the actual file. Confirm against the repository.
static HybridGaussianConditional::shared_ptr CreateHybridMotionModel(
double mu0, double mu1, double sigma0, double sigma1) {
// Pre-refactor version: build each branch conditional explicitly.
auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
auto c0 = make_shared<GaussianConditional>(X(1), Vector1(mu0), I_1x1, X(0),
-I_1x1, model0),
c1 = make_shared<GaussianConditional>(X(1), Vector1(mu1), I_1x1, X(0),
-I_1x1, model1);
return std::make_shared<HybridGaussianConditional>(m1, std::vector{c0, c1});
// Post-refactor version: delegate to the (mean, sigma)-pair constructor.
std::vector<std::pair<Vector, double>> motionModels{{Vector1(mu0), sigma0},
{Vector1(mu1), sigma1}};
return std::make_shared<HybridGaussianConditional>(m1, X(1), I_1x1, X(0),
motionModels);
}
/// Create two state Bayes network with 1 or two measurement models

View File

@ -33,10 +33,7 @@
#include <gtsam/sam/BearingRangeFactor.h>
#include <gtsam/slam/BetweenFactor.h>
#include <numeric>
#include "Switching.h"
#include "gtsam/nonlinear/NonlinearFactor.h"
// Include for test suite
#include <CppUnitLite/TestHarness.h>

View File

@ -30,7 +30,6 @@
#include <numeric>
#include "Switching.h"
#include "gtsam/nonlinear/NonlinearFactor.h"
// Include for test suite
#include <CppUnitLite/TestHarness.h>