Merge pull request #1823 from borglab/improved-hybrid-api

release/4.3a0
Varun Agrawal 2024-09-06 12:22:56 -04:00 committed by GitHub
commit caf85c208e
8 changed files with 321 additions and 177 deletions

View File

@@ -70,20 +70,6 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
factors_.push_back(conditional);
}
/**
* Preferred: add a conditional directly using a pointer.
*
* Examples:
* hbn.emplace_back(new GaussianMixture(...));
* hbn.emplace_back(new GaussianConditional(...));
* hbn.emplace_back(new DiscreteConditional(...));
*/
template <class Conditional>
void emplace_back(Conditional *conditional) {
factors_.push_back(std::make_shared<HybridConditional>(
std::shared_ptr<Conditional>(conditional)));
}
/**
* Add a conditional using a shared_ptr, with implicit conversion to
* a HybridConditional.
@@ -101,6 +87,36 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
std::make_shared<HybridConditional>(std::move(conditional)));
}
/**
* @brief Add a conditional to the Bayes net.
* The conditional is implicitly converted to a HybridConditional.
*
* E.g.
* hbn.push_back(std::make_shared<DiscreteConditional>(m, "1/1"));
*
* @tparam CONDITIONAL Type of conditional. This is the shared_ptr overload.
* @param conditional The conditional as a shared pointer.
*/
template <class CONDITIONAL>
void push_back(const std::shared_ptr<CONDITIONAL> &conditional) {
factors_.push_back(std::make_shared<HybridConditional>(conditional));
}
/**
* Preferred: emplace a conditional directly from its constructor arguments.
*
* Examples:
* hbn.emplace_shared<GaussianMixture>(...);
* hbn.emplace_shared<GaussianConditional>(...);
* hbn.emplace_shared<DiscreteConditional>(...);
*/
template <class CONDITIONAL, class... Args>
void emplace_shared(Args &&...args) {
auto cond = std::allocate_shared<CONDITIONAL>(
Eigen::aligned_allocator<CONDITIONAL>(), std::forward<Args>(args)...);
factors_.push_back(std::make_shared<HybridConditional>(std::move(cond)));
}
/**
* @brief Get the Gaussian Bayes Net which corresponds to a specific discrete
* value assignment.
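
A minimal usage sketch of the updated HybridBayesNet API above; the headers, keys, and parameter strings below are illustrative assumptions, not taken from the changed files.

// Sketch only: assumes the usual GTSAM hybrid headers and symbol shorthands.
#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianConditional.h>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;

HybridBayesNet makeExampleBayesNet() {
  const DiscreteKey mode{M(0), 2};
  HybridBayesNet hbn;

  // Preferred: construct the conditional in place with emplace_shared.
  hbn.emplace_shared<DiscreteConditional>(mode, "1/1");

  // push_back also accepts a shared_ptr to any conditional type and wraps it
  // in a HybridConditional implicitly.
  auto prior = GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5);
  hbn.push_back(prior);

  return hbn;
}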

View File

@@ -0,0 +1,167 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file HybridValues.cpp
* @author Varun Agrawal
* @date August 2024
*/
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Key.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/nonlinear/Values.h>
namespace gtsam {
/* ************************************************************************* */
HybridValues::HybridValues(const VectorValues& cv, const DiscreteValues& dv)
: continuous_(cv), discrete_(dv) {}
/* ************************************************************************* */
HybridValues::HybridValues(const VectorValues& cv, const DiscreteValues& dv,
const Values& v)
: continuous_(cv), discrete_(dv), nonlinear_(v) {}
/* ************************************************************************* */
void HybridValues::print(const std::string& s,
const KeyFormatter& keyFormatter) const {
std::cout << s << ": \n";
continuous_.print(" Continuous",
keyFormatter); // print continuous components
discrete_.print(" Discrete", keyFormatter); // print discrete components
}
/* ************************************************************************* */
bool HybridValues::equals(const HybridValues& other, double tol) const {
return continuous_.equals(other.continuous_, tol) &&
discrete_.equals(other.discrete_, tol);
}
/* ************************************************************************* */
const VectorValues& HybridValues::continuous() const { return continuous_; }
/* ************************************************************************* */
const DiscreteValues& HybridValues::discrete() const { return discrete_; }
/* ************************************************************************* */
const Values& HybridValues::nonlinear() const { return nonlinear_; }
/* ************************************************************************* */
bool HybridValues::existsVector(Key j) { return continuous_.exists(j); }
/* ************************************************************************* */
bool HybridValues::existsDiscrete(Key j) {
return (discrete_.find(j) != discrete_.end());
}
/* ************************************************************************* */
bool HybridValues::existsNonlinear(Key j) { return nonlinear_.exists(j); }
/* ************************************************************************* */
bool HybridValues::exists(Key j) {
return existsVector(j) || existsDiscrete(j) || existsNonlinear(j);
}
/* ************************************************************************* */
HybridValues HybridValues::retract(const VectorValues& delta) const {
HybridValues updated(continuous_, discrete_, nonlinear_.retract(delta));
return updated;
}
/* ************************************************************************* */
void HybridValues::insert(Key j, const Vector& value) {
continuous_.insert(j, value);
}
/* ************************************************************************* */
void HybridValues::insert(Key j, size_t value) { discrete_[j] = value; }
/* ************************************************************************* */
void HybridValues::insert_or_assign(Key j, const Vector& value) {
continuous_.insert_or_assign(j, value);
}
/* ************************************************************************* */
void HybridValues::insert_or_assign(Key j, size_t value) {
discrete_[j] = value;
}
/* ************************************************************************* */
HybridValues& HybridValues::insert(const VectorValues& values) {
continuous_.insert(values);
return *this;
}
/* ************************************************************************* */
HybridValues& HybridValues::insert(const DiscreteValues& values) {
discrete_.insert(values);
return *this;
}
/* ************************************************************************* */
HybridValues& HybridValues::insert(const Values& values) {
nonlinear_.insert(values);
return *this;
}
/* ************************************************************************* */
HybridValues& HybridValues::insert(const HybridValues& values) {
continuous_.insert(values.continuous());
discrete_.insert(values.discrete());
nonlinear_.insert(values.nonlinear());
return *this;
}
/* ************************************************************************* */
Vector& HybridValues::at(Key j) { return continuous_.at(j); }
/* ************************************************************************* */
size_t& HybridValues::atDiscrete(Key j) { return discrete_.at(j); }
/* ************************************************************************* */
HybridValues& HybridValues::update(const VectorValues& values) {
continuous_.update(values);
return *this;
}
/* ************************************************************************* */
HybridValues& HybridValues::update(const DiscreteValues& values) {
discrete_.update(values);
return *this;
}
/* ************************************************************************* */
HybridValues& HybridValues::update(const HybridValues& values) {
continuous_.update(values.continuous());
discrete_.update(values.discrete());
return *this;
}
/* ************************************************************************* */
VectorValues HybridValues::continuousSubset(const KeyVector& keys) const {
VectorValues measurements;
for (const auto& key : keys) {
measurements.insert(key, continuous_.at(key));
}
return measurements;
}
/* ************************************************************************* */
std::string HybridValues::html(const KeyFormatter& keyFormatter) const {
std::stringstream ss;
ss << this->continuous_.html(keyFormatter);
ss << this->discrete_.html(keyFormatter);
return ss.str();
}
} // namespace gtsam
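
A rough illustration of the HybridValues interface implemented above; the keys and numeric values are placeholders chosen for the sketch.

#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Symbol.h>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;

void hybridValuesSketch() {
  HybridValues values;
  values.insert(X(0), Vector1(1.0));            // continuous (VectorValues) entry
  values.insert(M(0), static_cast<size_t>(1));  // discrete entry

  // Read/write access to stored values.
  values.at(X(0)) = Vector1(2.0);
  values.atDiscrete(M(0)) = 0;

  // Existence checks cover the continuous, discrete, and nonlinear parts.
  const bool known = values.exists(X(0)) && values.existsDiscrete(M(0));
  (void)known;
}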

View File

@@ -18,8 +18,6 @@
#pragma once
#include <gtsam/discrete/Assignment.h>
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/inference/Key.h>
#include <gtsam/linear/VectorValues.h>
@@ -55,13 +53,11 @@ class GTSAM_EXPORT HybridValues {
HybridValues() = default;
/// Construct from VectorValues and DiscreteValues.
HybridValues(const VectorValues& cv, const DiscreteValues& dv)
: continuous_(cv), discrete_(dv) {}
HybridValues(const VectorValues& cv, const DiscreteValues& dv);
/// Construct from all value types.
HybridValues(const VectorValues& cv, const DiscreteValues& dv,
const Values& v)
: continuous_(cv), discrete_(dv), nonlinear_(v) {}
const Values& v);
/// @}
/// @name Testable
@@ -69,144 +65,105 @@ class GTSAM_EXPORT HybridValues {
/// print required by Testable for unit testing
void print(const std::string& s = "HybridValues",
const KeyFormatter& keyFormatter = DefaultKeyFormatter) const {
std::cout << s << ": \n";
continuous_.print(" Continuous",
keyFormatter); // print continuous components
discrete_.print(" Discrete", keyFormatter); // print discrete components
}
const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
/// equals required by Testable for unit testing
bool equals(const HybridValues& other, double tol = 1e-9) const {
return continuous_.equals(other.continuous_, tol) &&
discrete_.equals(other.discrete_, tol);
}
bool equals(const HybridValues& other, double tol = 1e-9) const;
/// @}
/// @name Interface
/// @{
/// Return the multi-dimensional vector values.
const VectorValues& continuous() const { return continuous_; }
const VectorValues& continuous() const;
/// Return the discrete values.
const DiscreteValues& discrete() const { return discrete_; }
const DiscreteValues& discrete() const;
/// Return the nonlinear values.
const Values& nonlinear() const { return nonlinear_; }
const Values& nonlinear() const;
/// Check whether a variable with key \c j exists in VectorValues.
bool existsVector(Key j) { return continuous_.exists(j); }
bool existsVector(Key j);
/// Check whether a variable with key \c j exists in DiscreteValues.
bool existsDiscrete(Key j) { return (discrete_.find(j) != discrete_.end()); }
bool existsDiscrete(Key j);
/// Check whether a variable with key \c j exists in the nonlinear Values.
bool existsNonlinear(Key j) { return nonlinear_.exists(j); }
bool existsNonlinear(Key j);
/// Check whether a variable with key \c j exists.
bool exists(Key j) {
return existsVector(j) || existsDiscrete(j) || existsNonlinear(j);
}
bool exists(Key j);
/** Add a delta config to the current config and return a new config */
HybridValues retract(const VectorValues& delta) const;
/** Insert a vector \c value with key \c j. Throws an invalid_argument
* exception if the key \c j is already used.
* @param value The vector to be inserted.
* @param j The index with which the value will be associated. */
void insert(Key j, const Vector& value) { continuous_.insert(j, value); }
void insert(Key j, const Vector& value);
/** Insert a discrete \c value with key \c j. Replaces the existing value if
* the key \c j is already used.
* @param value The discrete value to be inserted.
* @param j The index with which the value will be associated. */
void insert(Key j, size_t value) { discrete_[j] = value; }
void insert(Key j, size_t value);
/// insert_or_assign(), similar to Values.h
void insert_or_assign(Key j, const Vector& value) {
continuous_.insert_or_assign(j, value);
}
void insert_or_assign(Key j, const Vector& value);
/// insert_or_assign(), similar to Values.h
void insert_or_assign(Key j, size_t value) { discrete_[j] = value; }
void insert_or_assign(Key j, size_t value);
/** Insert all continuous values from \c values. Throws an invalid_argument
* exception if any keys to be inserted are already used. */
HybridValues& insert(const VectorValues& values) {
continuous_.insert(values);
return *this;
}
HybridValues& insert(const VectorValues& values);
/** Insert all discrete values from \c values. Throws an invalid_argument
* exception if any keys to be inserted are already used. */
HybridValues& insert(const DiscreteValues& values) {
discrete_.insert(values);
return *this;
}
HybridValues& insert(const DiscreteValues& values);
/** Insert all values from \c values. Throws an invalid_argument
* exception if any keys to be inserted are already used. */
HybridValues& insert(const Values& values) {
nonlinear_.insert(values);
return *this;
}
HybridValues& insert(const Values& values);
/** Insert all values from \c values. Throws an invalid_argument exception if
* any keys to be inserted are already used. */
HybridValues& insert(const HybridValues& values) {
continuous_.insert(values.continuous());
discrete_.insert(values.discrete());
nonlinear_.insert(values.nonlinear());
return *this;
}
HybridValues& insert(const HybridValues& values);
/**
* Read/write access to the vector value with key \c j, throws
* std::out_of_range if \c j does not exist.
*/
Vector& at(Key j) { return continuous_.at(j); }
Vector& at(Key j);
/**
* Read/write access to the discrete value with key \c j, throws
* std::out_of_range if \c j does not exist.
*/
size_t& atDiscrete(Key j) { return discrete_.at(j); }
size_t& atDiscrete(Key j);
/** For all key/value pairs in \c values, replace the continuous values in
* this object that have matching keys with those in \c values. Throws
* std::out_of_range if any keys in \c values are not present in this object.
*/
HybridValues& update(const VectorValues& values) {
continuous_.update(values);
return *this;
}
HybridValues& update(const VectorValues& values);
/** For all key/value pairs in \c values, replace the discrete values in
* this object that have matching keys with those in \c values. Throws
* std::out_of_range if any keys in \c values are not present in this object.
*/
HybridValues& update(const DiscreteValues& values) {
discrete_.update(values);
return *this;
}
HybridValues& update(const DiscreteValues& values);
/** For all key/value pairs in \c values, replace the values in this object
* that have matching keys with those in \c values. Throws
* std::out_of_range if any keys in \c values are not present in this object.
*/
HybridValues& update(const HybridValues& values) {
continuous_.update(values.continuous());
discrete_.update(values.discrete());
return *this;
}
HybridValues& update(const HybridValues& values);
/// Extract continuous values with given keys.
VectorValues continuousSubset(const KeyVector& keys) const {
VectorValues measurements;
for (const auto& key : keys) {
measurements.insert(key, continuous_.at(key));
}
return measurements;
}
VectorValues continuousSubset(const KeyVector& keys) const;
/// @}
/// @name Wrapper support
@@ -219,12 +176,7 @@ class GTSAM_EXPORT HybridValues {
* @return string html output.
*/
std::string html(
const KeyFormatter& keyFormatter = DefaultKeyFormatter) const {
std::stringstream ss;
ss << this->continuous_.html(keyFormatter);
ss << this->discrete_.html(keyFormatter);
return ss.str();
}
const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
/// @}
};
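
For reference, the distinction between insert, insert_or_assign, and update documented above, sketched with a single continuous key (the key and values are placeholders):

#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/VectorValues.h>

using namespace gtsam;
using symbol_shorthand::X;

void insertSemanticsSketch() {
  HybridValues v;
  v.insert(X(0), Vector1(1.0));            // first insertion succeeds
  // v.insert(X(0), Vector1(2.0));         // would throw: key X(0) already used
  v.insert_or_assign(X(0), Vector1(2.0));  // overwrites the existing value

  // update() replaces values only for keys already present and throws
  // std::out_of_range for unknown keys.
  VectorValues delta;
  delta.insert(X(0), Vector1(3.0));
  v.update(delta);
}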

View File

@@ -43,12 +43,12 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
// Create Gaussian mixture z_i = x0 + noise for each measurement.
for (size_t i = 0; i < num_measurements; i++) {
const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode;
bayesNet.emplace_back(
new GaussianMixture({Z(i)}, {X(0)}, {mode_i},
{GaussianConditional::sharedMeanAndStddev(
Z(i), I_1x1, X(0), Z_1x1, 0.5),
GaussianConditional::sharedMeanAndStddev(
Z(i), I_1x1, X(0), Z_1x1, 3)}));
bayesNet.emplace_shared<GaussianMixture>(
KeyVector{Z(i)}, KeyVector{X(0)}, DiscreteKeys{mode_i},
std::vector{GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0),
Z_1x1, 0.5),
GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0),
Z_1x1, 3)});
}
// Create prior on X(0).
@@ -58,7 +58,7 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
// Add prior on mode.
const size_t nrModes = manyModes ? num_measurements : 1;
for (size_t i = 0; i < nrModes; i++) {
bayesNet.emplace_back(new DiscreteConditional({M(i), 2}, "4/6"));
bayesNet.emplace_shared<DiscreteConditional>(DiscreteKey{M(i), 2}, "4/6");
}
return bayesNet;
}
@@ -70,8 +70,7 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
* the generative Bayes net model HybridBayesNet::Example(num_measurements)
*/
inline HybridGaussianFactorGraph createHybridGaussianFactorGraph(
size_t num_measurements = 1,
std::optional<VectorValues> measurements = {},
size_t num_measurements = 1, std::optional<VectorValues> measurements = {},
bool manyModes = false) {
auto bayesNet = createHybridBayesNet(num_measurements, manyModes);
if (measurements) {

View File

@@ -228,12 +228,12 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1,
auto c0 = make_shared<GaussianConditional>(z, Vector1(mu0), I_1x1, model0),
c1 = make_shared<GaussianConditional>(z, Vector1(mu1), I_1x1, model1);
auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1});
auto mixing = new DiscreteConditional(m, "0.5/0.5");
HybridBayesNet hbn;
hbn.emplace_back(gm);
hbn.emplace_back(mixing);
hbn.emplace_shared<GaussianMixture>(KeyVector{z}, KeyVector{},
DiscreteKeys{m}, std::vector{c0, c1});
auto mixing = make_shared<DiscreteConditional>(m, "0.5/0.5");
hbn.push_back(mixing);
return hbn;
}
@@ -278,7 +278,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) {
// At the halfway point between the means, we should get P(m|z)=0.5
HybridBayesNet expected;
expected.emplace_back(new DiscreteConditional(m, "0.5/0.5"));
expected.emplace_shared<DiscreteConditional>(m, "0.5/0.5");
EXPECT(assert_equal(expected, *bn));
}
@@ -350,10 +350,10 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) {
// At the halfway point between the means
HybridBayesNet expected;
expected.emplace_back(new DiscreteConditional(
m, {},
expected.emplace_shared<DiscreteConditional>(
m, DiscreteKeys{},
vector<double>{prob_m_z(mu1, mu0, sigma1, sigma0, m1_high),
prob_m_z(mu0, mu1, sigma0, sigma1, m1_high)}));
prob_m_z(mu0, mu1, sigma0, sigma1, m1_high)});
EXPECT(assert_equal(expected, *bn));
}
@@ -401,9 +401,9 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
auto measurement_model = noiseModel::Isotropic::Sigma(1, measurement_sigma);
// Add measurement P(z0 | x0)
auto p_z0 = new GaussianConditional(z0, Vector1(0.0), -I_1x1, x0, I_1x1,
measurement_model);
hbn.emplace_back(p_z0);
auto p_z0 = std::make_shared<GaussianConditional>(
z0, Vector1(0.0), -I_1x1, x0, I_1x1, measurement_model);
hbn.push_back(p_z0);
// Add hybrid motion model
auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
@@ -413,19 +413,20 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
c1 = make_shared<GaussianConditional>(x1, Vector1(mu1), I_1x1, x0,
-I_1x1, model1);
auto motion = new GaussianMixture({x1}, {x0}, {m1}, {c0, c1});
hbn.emplace_back(motion);
auto motion = std::make_shared<GaussianMixture>(
KeyVector{x1}, KeyVector{x0}, DiscreteKeys{m1}, std::vector{c0, c1});
hbn.push_back(motion);
if (add_second_measurement) {
// Add second measurement
auto p_z1 = new GaussianConditional(z1, Vector1(0.0), -I_1x1, x1, I_1x1,
measurement_model);
hbn.emplace_back(p_z1);
auto p_z1 = std::make_shared<GaussianConditional>(
z1, Vector1(0.0), -I_1x1, x1, I_1x1, measurement_model);
hbn.push_back(p_z1);
}
// Discrete uniform prior.
auto p_m1 = new DiscreteConditional(m1, "0.5/0.5");
hbn.emplace_back(p_m1);
auto p_m1 = std::make_shared<DiscreteConditional>(m1, "0.5/0.5");
hbn.push_back(p_m1);
return hbn;
}

View File

@@ -43,7 +43,7 @@ static const DiscreteKey Asia(asiaKey, 2);
// Test creation of a pure discrete Bayes net.
TEST(HybridBayesNet, Creation) {
HybridBayesNet bayesNet;
bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));
bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
DiscreteConditional expected(Asia, "99/1");
CHECK(bayesNet.at(0)->asDiscrete());
@@ -54,7 +54,7 @@ TEST(HybridBayesNet, Creation) {
// Test adding a Bayes net to another one.
TEST(HybridBayesNet, Add) {
HybridBayesNet bayesNet;
bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));
bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
HybridBayesNet other;
other.add(bayesNet);
@@ -65,7 +65,7 @@ TEST(HybridBayesNet, Add) {
// Test evaluate for a pure discrete Bayes net P(Asia).
TEST(HybridBayesNet, EvaluatePureDiscrete) {
HybridBayesNet bayesNet;
bayesNet.emplace_back(new DiscreteConditional(Asia, "4/6"));
bayesNet.emplace_shared<DiscreteConditional>(Asia, "4/6");
HybridValues values;
values.insert(asiaKey, 0);
EXPECT_DOUBLES_EQUAL(0.4, bayesNet.evaluate(values), 1e-9);
@@ -107,9 +107,10 @@ TEST(HybridBayesNet, evaluateHybrid) {
// Create hybrid Bayes net.
HybridBayesNet bayesNet;
bayesNet.push_back(continuousConditional);
bayesNet.emplace_back(
new GaussianMixture({X(1)}, {}, {Asia}, {conditional0, conditional1}));
bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));
bayesNet.emplace_shared<GaussianMixture>(
KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
std::vector{conditional0, conditional1});
bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
// Create values at which to evaluate.
HybridValues values;
@@ -167,13 +168,14 @@ TEST(HybridBayesNet, Error) {
conditional1 = std::make_shared<GaussianConditional>(
X(1), Vector1::Constant(2), I_1x1, model1);
auto gm =
new GaussianMixture({X(1)}, {}, {Asia}, {conditional0, conditional1});
auto gm = std::make_shared<GaussianMixture>(
KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
std::vector{conditional0, conditional1});
// Create hybrid Bayes net.
HybridBayesNet bayesNet;
bayesNet.push_back(continuousConditional);
bayesNet.emplace_back(gm);
bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));
bayesNet.push_back(gm);
bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
// Create values at which to evaluate.
HybridValues values;

View File

@@ -616,12 +616,12 @@ TEST(HybridEstimation, ModeSelection) {
GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1));
bn.push_back(
GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));
bn.emplace_back(new GaussianMixture(
{Z(0)}, {X(0), X(1)}, {mode},
{GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
Z_1x1, noise_loose),
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
Z_1x1, noise_tight)}));
bn.emplace_shared<GaussianMixture>(
KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
std::vector{GaussianConditional::sharedMeanAndStddev(
Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_loose),
GaussianConditional::sharedMeanAndStddev(
Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_tight)});
VectorValues vv;
vv.insert(Z(0), Z_1x1);
@@ -647,12 +647,12 @@ TEST(HybridEstimation, ModeSelection2) {
GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1));
bn.push_back(
GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1));
bn.emplace_back(new GaussianMixture(
{Z(0)}, {X(0), X(1)}, {mode},
{GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
Z_3x1, noise_loose),
GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
Z_3x1, noise_tight)}));
bn.emplace_shared<GaussianMixture>(
KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
std::vector{GaussianConditional::sharedMeanAndStddev(
Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_loose),
GaussianConditional::sharedMeanAndStddev(
Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_tight)});
VectorValues vv;
vv.insert(Z(0), Z_3x1);

View File

@@ -651,7 +651,8 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) {
}
/* ****************************************************************************/
// Test hybrid gaussian factor graph errorTree when there is a HybridConditional in the graph
// Test hybrid gaussian factor graph errorTree when
// there is a HybridConditional in the graph
TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
using symbol_shorthand::F;
@@ -665,12 +666,11 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
auto measurement_model = noiseModel::Isotropic::Sigma(1, 2.0);
// Set a prior P(x0) at x0=0
hbn.emplace_back(
new GaussianConditional(x0, Vector1(0.0), I_1x1, prior_model));
hbn.emplace_shared<GaussianConditional>(x0, Vector1(0.0), I_1x1, prior_model);
// Add measurement P(z0 | x0)
hbn.emplace_back(new GaussianConditional(z0, Vector1(0.0), -I_1x1, x0, I_1x1,
measurement_model));
hbn.emplace_shared<GaussianConditional>(z0, Vector1(0.0), -I_1x1, x0, I_1x1,
measurement_model);
// Add hybrid motion model
double mu = 0.0;
@@ -681,10 +681,11 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
x0, -I_1x1, model0),
c1 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
x0, -I_1x1, model1);
hbn.emplace_back(new GaussianMixture({f01}, {x0, x1}, {m1}, {c0, c1}));
hbn.emplace_shared<GaussianMixture>(KeyVector{f01}, KeyVector{x0, x1},
DiscreteKeys{m1}, std::vector{c0, c1});
// Discrete uniform prior.
hbn.emplace_back(new DiscreteConditional(m1, "0.5/0.5"));
hbn.emplace_shared<DiscreteConditional>(m1, "0.5/0.5");
VectorValues given;
given.insert(z0, Vector1(0.0));
@@ -804,11 +805,12 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) {
X(0), Vector1(14.1421), I_1x1 * 2.82843),
conditional1 = std::make_shared<GaussianConditional>(
X(0), Vector1(10.1379), I_1x1 * 2.02759);
expectedBayesNet.emplace_back(
new GaussianMixture({X(0)}, {}, {mode}, {conditional0, conditional1}));
expectedBayesNet.emplace_shared<GaussianMixture>(
KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
std::vector{conditional0, conditional1});
// Add prior on mode.
expectedBayesNet.emplace_back(new DiscreteConditional(mode, "74/26"));
expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "74/26");
// Test elimination
const auto posterior = fg.eliminateSequential();
@@ -828,18 +830,20 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
HybridBayesNet bn;
// Create Gaussian mixture z_0 = x0 + noise for each measurement.
bn.emplace_back(new GaussianMixture(
{Z(0)}, {X(0)}, {mode},
{GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3),
auto gm = std::make_shared<GaussianMixture>(
KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode},
std::vector{
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3),
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1,
0.5)}));
0.5)});
bn.push_back(gm);
// Create prior on X(0).
bn.push_back(
GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5));
// Add prior on mode.
bn.emplace_back(new DiscreteConditional(mode, "1/1"));
bn.emplace_shared<DiscreteConditional>(mode, "1/1");
// bn.print();
auto fg = bn.toFactorGraph(measurements);
@@ -858,11 +862,12 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
X(0), Vector1(10.1379), I_1x1 * 2.02759),
conditional1 = std::make_shared<GaussianConditional>(
X(0), Vector1(14.1421), I_1x1 * 2.82843);
expectedBayesNet.emplace_back(
new GaussianMixture({X(0)}, {}, {mode}, {conditional0, conditional1}));
expectedBayesNet.emplace_shared<GaussianMixture>(
KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
std::vector{conditional0, conditional1});
// Add prior on mode.
expectedBayesNet.emplace_back(new DiscreteConditional(mode, "1/1"));
expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "1/1");
// Test elimination
const auto posterior = fg.eliminateSequential();
@@ -894,11 +899,12 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) {
X(0), Vector1(17.3205), I_1x1 * 3.4641),
conditional1 = std::make_shared<GaussianConditional>(
X(0), Vector1(10.274), I_1x1 * 2.0548);
expectedBayesNet.emplace_back(
new GaussianMixture({X(0)}, {}, {mode}, {conditional0, conditional1}));
expectedBayesNet.emplace_shared<GaussianMixture>(
KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
std::vector{conditional0, conditional1});
// Add prior on mode.
expectedBayesNet.emplace_back(new DiscreteConditional(mode, "23/77"));
expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "23/77");
// Test elimination
const auto posterior = fg.eliminateSequential();
@@ -940,30 +946,31 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
for (size_t t : {0, 1, 2}) {
// Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t):
const auto noise_mode_t = DiscreteKey{N(t), 2};
bn.emplace_back(
new GaussianMixture({Z(t)}, {X(t)}, {noise_mode_t},
{GaussianConditional::sharedMeanAndStddev(
Z(t), I_1x1, X(t), Z_1x1, 0.5),
GaussianConditional::sharedMeanAndStddev(
Z(t), I_1x1, X(t), Z_1x1, 3.0)}));
bn.emplace_shared<GaussianMixture>(
KeyVector{Z(t)}, KeyVector{X(t)}, DiscreteKeys{noise_mode_t},
std::vector{GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t),
Z_1x1, 0.5),
GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t),
Z_1x1, 3.0)});
// Create prior on discrete mode N(t):
bn.emplace_back(new DiscreteConditional(noise_mode_t, "20/80"));
bn.emplace_shared<DiscreteConditional>(noise_mode_t, "20/80");
}
// Add motion models:
for (size_t t : {2, 1}) {
// Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1):
const auto motion_model_t = DiscreteKey{M(t), 2};
bn.emplace_back(
new GaussianMixture({X(t)}, {X(t - 1)}, {motion_model_t},
{GaussianConditional::sharedMeanAndStddev(
auto gm = std::make_shared<GaussianMixture>(
KeyVector{X(t)}, KeyVector{X(t - 1)}, DiscreteKeys{motion_model_t},
std::vector{GaussianConditional::sharedMeanAndStddev(
X(t), I_1x1, X(t - 1), Z_1x1, 0.2),
GaussianConditional::sharedMeanAndStddev(
X(t), I_1x1, X(t - 1), I_1x1, 0.2)}));
X(t), I_1x1, X(t - 1), I_1x1, 0.2)});
bn.push_back(gm);
// Create prior on motion model M(t):
bn.emplace_back(new DiscreteConditional(motion_model_t, "40/60"));
bn.emplace_shared<DiscreteConditional>(motion_model_t, "40/60");
}
// Create Gaussian prior on continuous X(0) using sharedMeanAndStddev: