Merge branch 'develop' into hybrid-cleanup

release/4.3a0
Varun Agrawal 2024-09-18 17:58:38 -04:00
commit df0ff8a184
25 changed files with 190 additions and 166 deletions

View File

@ -50,22 +50,20 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1,
/* ************************************************************************ */
HybridFactor::HybridFactor(const KeyVector &keys)
: Base(keys),
category_(HybridCategory::Continuous),
continuousKeys_(keys) {}
: Base(keys), category_(Category::Continuous), continuousKeys_(keys) {}
/* ************************************************************************ */
HybridCategory GetCategory(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys) {
HybridFactor::Category GetCategory(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys) {
if ((continuousKeys.size() == 0) && (discreteKeys.size() != 0)) {
return HybridCategory::Discrete;
return HybridFactor::Category::Discrete;
} else if ((continuousKeys.size() != 0) && (discreteKeys.size() == 0)) {
return HybridCategory::Continuous;
return HybridFactor::Category::Continuous;
} else if ((continuousKeys.size() != 0) && (discreteKeys.size() != 0)) {
return HybridCategory::Hybrid;
return HybridFactor::Category::Hybrid;
} else {
// Case where we have no keys. Should never happen.
return HybridCategory::None;
return HybridFactor::Category::None;
}
}
@ -80,7 +78,7 @@ HybridFactor::HybridFactor(const KeyVector &continuousKeys,
/* ************************************************************************ */
HybridFactor::HybridFactor(const DiscreteKeys &discreteKeys)
: Base(CollectKeys({}, discreteKeys)),
category_(HybridCategory::Discrete),
category_(Category::Discrete),
discreteKeys_(discreteKeys),
continuousKeys_({}) {}
@ -97,16 +95,16 @@ void HybridFactor::print(const std::string &s,
const KeyFormatter &formatter) const {
std::cout << (s.empty() ? "" : s + "\n");
switch (category_) {
case HybridCategory::Continuous:
case Category::Continuous:
std::cout << "Continuous ";
break;
case HybridCategory::Discrete:
case Category::Discrete:
std::cout << "Discrete ";
break;
case HybridCategory::Hybrid:
case Category::Hybrid:
std::cout << "Hybrid ";
break;
case HybridCategory::None:
case Category::None:
std::cout << "None ";
break;
}

View File

@ -41,9 +41,6 @@ KeyVector CollectKeys(const KeyVector &keys1, const KeyVector &keys2);
DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1,
const DiscreteKeys &key2);
/// Enum to help with categorizing hybrid factors.
enum class HybridCategory { None, Discrete, Continuous, Hybrid };
/**
* Base class for *truly* hybrid probabilistic factors
*
@ -55,9 +52,13 @@ enum class HybridCategory { None, Discrete, Continuous, Hybrid };
* @ingroup hybrid
*/
class GTSAM_EXPORT HybridFactor : public Factor {
public:
/// Enum to help with categorizing hybrid factors.
enum class Category { None, Discrete, Continuous, Hybrid };
private:
/// Record what category of HybridFactor this is.
HybridCategory category_ = HybridCategory::None;
Category category_ = Category::None;
protected:
// Set of DiscreteKeys for this factor.
@ -118,13 +119,13 @@ class GTSAM_EXPORT HybridFactor : public Factor {
/// @{
/// True if this is a factor of discrete variables only.
bool isDiscrete() const { return category_ == HybridCategory::Discrete; }
bool isDiscrete() const { return category_ == Category::Discrete; }
/// True if this is a factor of continuous variables only.
bool isContinuous() const { return category_ == HybridCategory::Continuous; }
bool isContinuous() const { return category_ == Category::Continuous; }
/// True if this is a Discrete-Continuous factor.
bool isHybrid() const { return category_ == HybridCategory::Hybrid; }
bool isHybrid() const { return category_ == Category::Hybrid; }
/// Return the number of continuous variables in this factor.
size_t nrContinuous() const { return continuousKeys_.size(); }

View File

@ -55,23 +55,14 @@ HybridGaussianConditional::conditionals() const {
return conditionals_;
}
/* *******************************************************************************/
HybridGaussianConditional::HybridGaussianConditional(
KeyVector &&continuousFrontals, KeyVector &&continuousParents,
DiscreteKeys &&discreteParents,
std::vector<GaussianConditional::shared_ptr> &&conditionals)
: HybridGaussianConditional(continuousFrontals, continuousParents,
discreteParents,
Conditionals(discreteParents, conditionals)) {}
/* *******************************************************************************/
HybridGaussianConditional::HybridGaussianConditional(
const KeyVector &continuousFrontals, const KeyVector &continuousParents,
const DiscreteKeys &discreteParents,
const DiscreteKey &discreteParent,
const std::vector<GaussianConditional::shared_ptr> &conditionals)
: HybridGaussianConditional(continuousFrontals, continuousParents,
discreteParents,
Conditionals(discreteParents, conditionals)) {}
DiscreteKeys{discreteParent},
Conditionals({discreteParent}, conditionals)) {}
/* *******************************************************************************/
// TODO(dellaert): This is copy/paste: HybridGaussianConditional should be

View File

@ -107,29 +107,18 @@ class GTSAM_EXPORT HybridGaussianConditional
const Conditionals &conditionals);
/**
* @brief Make a Gaussian Mixture from a list of Gaussian conditionals
* @brief Make a Gaussian Mixture from a vector of Gaussian conditionals.
* The DecisionTree-based constructor is preferred over this one.
*
* @param continuousFrontals The continuous frontal variables
* @param continuousParents The continuous parent variables
* @param discreteParents Discrete parents variables
* @param conditionals List of conditionals
*/
HybridGaussianConditional(
KeyVector &&continuousFrontals, KeyVector &&continuousParents,
DiscreteKeys &&discreteParents,
std::vector<GaussianConditional::shared_ptr> &&conditionals);
/**
* @brief Make a Gaussian Mixture from a list of Gaussian conditionals
*
* @param continuousFrontals The continuous frontal variables
* @param continuousParents The continuous parent variables
* @param discreteParents Discrete parents variables
* @param conditionals List of conditionals
* @param discreteParent Single discrete parent variable
* @param conditionals Vector of conditionals with the same size as the
* cardinality of the discrete parent.
*/
HybridGaussianConditional(
const KeyVector &continuousFrontals, const KeyVector &continuousParents,
const DiscreteKeys &discreteParents,
const DiscreteKey &discreteParent,
const std::vector<GaussianConditional::shared_ptr> &conditionals);
/// @}

View File

@ -42,9 +42,11 @@ HybridGaussianFactor::Factors augment(
const HybridGaussianFactor::FactorValuePairs &factors) {
// Find the minimum value so we can "proselytize" to positive values.
// Done because we can't have sqrt of negative numbers.
auto unzipped_pair = unzip(factors);
const HybridGaussianFactor::Factors gaussianFactors = unzipped_pair.first;
const AlgebraicDecisionTree<Key> valueTree = unzipped_pair.second;
HybridGaussianFactor::Factors gaussianFactors;
AlgebraicDecisionTree<Key> valueTree;
std::tie(gaussianFactors, valueTree) = unzip(factors);
// Normalize
double min_value = valueTree.min();
AlgebraicDecisionTree<Key> values =
valueTree.apply([&min_value](double n) { return n - min_value; });

View File

@ -96,15 +96,15 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
* GaussianFactor shared pointers.
*
* @param continuousKeys Vector of keys for continuous factors.
* @param discreteKeys Vector of discrete keys.
* @param discreteKey The discrete key to index each component.
* @param factors Vector of gaussian factor shared pointers
* and arbitrary scalars.
* and arbitrary scalars. Same size as the cardinality of discreteKey.
*/
HybridGaussianFactor(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys,
const DiscreteKey &discreteKey,
const std::vector<GaussianFactorValuePair> &factors)
: HybridGaussianFactor(continuousKeys, discreteKeys,
FactorValuePairs(discreteKeys, factors)) {}
: HybridGaussianFactor(continuousKeys, {discreteKey},
FactorValuePairs({discreteKey}, factors)) {}
/// @}
/// @name Testable

View File

@ -89,14 +89,15 @@ class HybridNonlinearFactor : public HybridFactor {
* @tparam FACTOR The type of the factor shared pointers being passed in.
* Will be typecast to NonlinearFactor shared pointers.
* @param keys Vector of keys for continuous factors.
* @param discreteKeys Vector of discrete keys.
* @param discreteKey The discrete key indexing each component factor.
* @param factors Vector of nonlinear factor and scalar pairs.
* Same size as the cardinality of discreteKey.
*/
template <typename FACTOR>
HybridNonlinearFactor(
const KeyVector& keys, const DiscreteKeys& discreteKeys,
const KeyVector& keys, const DiscreteKey& discreteKey,
const std::vector<std::pair<std::shared_ptr<FACTOR>, double>>& factors)
: Base(keys, discreteKeys) {
: Base(keys, {discreteKey}) {
std::vector<NonlinearFactorValuePair> nonlinear_factors;
KeySet continuous_keys_set(keys.begin(), keys.end());
KeySet factor_keys_set;
@ -112,7 +113,7 @@ class HybridNonlinearFactor : public HybridFactor {
"Factors passed into HybridNonlinearFactor need to be nonlinear!");
}
}
factors_ = Factors(discreteKeys, nonlinear_factors);
factors_ = Factors({discreteKey}, nonlinear_factors);
if (continuous_keys_set != factor_keys_set) {
throw std::runtime_error(
@ -134,7 +135,7 @@ class HybridNonlinearFactor : public HybridFactor {
auto errorFunc =
[continuousValues](const std::pair<sharedFactor, double>& f) {
auto [factor, val] = f;
return factor->error(continuousValues) + (0.5 * val * val);
return factor->error(continuousValues) + (0.5 * val);
};
DecisionTree<Key, double> result(factors_, errorFunc);
return result;
@ -153,7 +154,7 @@ class HybridNonlinearFactor : public HybridFactor {
auto [factor, val] = factors_(discreteValues);
// Compute the error for the selected factor
const double factorError = factor->error(continuousValues);
return factorError + (0.5 * val * val);
return factorError + (0.5 * val);
}
/**

View File

@ -76,7 +76,7 @@ virtual class HybridConditional {
class HybridGaussianFactor : gtsam::HybridFactor {
HybridGaussianFactor(
const gtsam::KeyVector& continuousKeys,
const gtsam::DiscreteKeys& discreteKeys,
const gtsam::DiscreteKey& discreteKey,
const std::vector<std::pair<gtsam::GaussianFactor::shared_ptr, double>>&
factorsList);
@ -91,8 +91,12 @@ class HybridGaussianConditional : gtsam::HybridFactor {
const gtsam::KeyVector& continuousFrontals,
const gtsam::KeyVector& continuousParents,
const gtsam::DiscreteKeys& discreteParents,
const std::vector<gtsam::GaussianConditional::shared_ptr>&
conditionalsList);
const gtsam::HybridGaussianConditional::Conditionals& conditionals);
HybridGaussianConditional(
const gtsam::KeyVector& continuousFrontals,
const gtsam::KeyVector& continuousParents,
const gtsam::DiscreteKey& discreteParent,
const std::vector<gtsam::GaussianConditional::shared_ptr>& conditionals);
gtsam::HybridGaussianFactor* likelihood(
const gtsam::VectorValues& frontals) const;
@ -248,7 +252,7 @@ class HybridNonlinearFactor : gtsam::HybridFactor {
bool normalized = false);
HybridNonlinearFactor(
const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys,
const gtsam::KeyVector& keys, const gtsam::DiscreteKey& discreteKey,
const std::vector<std::pair<gtsam::NonlinearFactor*, double>>& factors,
bool normalized = false);

View File

@ -57,15 +57,16 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain(
// keyFunc(1) to keyFunc(n+1)
for (size_t t = 1; t < n; t++) {
std::vector<GaussianFactorValuePair> components = {
{std::make_shared<JacobianFactor>(keyFunc(t), I_3x3, keyFunc(t + 1),
I_3x3, Z_3x1),
0.0},
{std::make_shared<JacobianFactor>(keyFunc(t), I_3x3, keyFunc(t + 1),
I_3x3, Vector3::Ones()),
0.0}};
hfg.add(HybridGaussianFactor({keyFunc(t), keyFunc(t + 1)},
{{dKeyFunc(t), 2}}, components));
DiscreteKeys dKeys{{dKeyFunc(t), 2}};
HybridGaussianFactor::FactorValuePairs components(
dKeys, {{std::make_shared<JacobianFactor>(keyFunc(t), I_3x3,
keyFunc(t + 1), I_3x3, Z_3x1),
0.0},
{std::make_shared<JacobianFactor>(
keyFunc(t), I_3x3, keyFunc(t + 1), I_3x3, Vector3::Ones()),
0.0}});
hfg.add(
HybridGaussianFactor({keyFunc(t), keyFunc(t + 1)}, dKeys, components));
if (t > 1) {
hfg.add(DecisionTreeFactor({{dKeyFunc(t - 1), 2}, {dKeyFunc(t), 2}},
@ -167,8 +168,8 @@ struct Switching {
components.push_back(
{std::dynamic_pointer_cast<NonlinearFactor>(f), 0.0});
}
nonlinearFactorGraph.emplace_shared<HybridNonlinearFactor>(
keys, DiscreteKeys{modes[k]}, components);
nonlinearFactorGraph.emplace_shared<HybridNonlinearFactor>(keys, modes[k],
components);
}
// Add measurement factors

View File

@ -43,12 +43,11 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
// Create Gaussian mixture z_i = x0 + noise for each measurement.
for (size_t i = 0; i < num_measurements; i++) {
const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode;
std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 0.5),
GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 3)};
bayesNet.emplace_shared<HybridGaussianConditional>(
KeyVector{Z(i)}, KeyVector{X(0)}, DiscreteKeys{mode_i},
std::vector{GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0),
Z_1x1, 0.5),
GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0),
Z_1x1, 3)});
KeyVector{Z(i)}, KeyVector{X(0)}, mode_i, conditionals);
}
// Create prior on X(0).

View File

@ -108,7 +108,7 @@ TEST(HybridBayesNet, evaluateHybrid) {
HybridBayesNet bayesNet;
bayesNet.push_back(continuousConditional);
bayesNet.emplace_shared<HybridGaussianConditional>(
KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
KeyVector{X(1)}, KeyVector{}, Asia,
std::vector{conditional0, conditional1});
bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
@ -169,7 +169,7 @@ TEST(HybridBayesNet, Error) {
X(1), Vector1::Constant(2), I_1x1, model1);
auto gm = std::make_shared<HybridGaussianConditional>(
KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
KeyVector{X(1)}, KeyVector{}, Asia,
std::vector{conditional0, conditional1});
// Create hybrid Bayes net.
HybridBayesNet bayesNet;
@ -383,17 +383,16 @@ TEST(HybridBayesNet, Sampling) {
HybridNonlinearFactorGraph nfg;
auto noise_model = noiseModel::Diagonal::Sigmas(Vector1(1.0));
nfg.emplace_shared<PriorFactor<double>>(X(0), 0.0, noise_model);
auto zero_motion =
std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
auto one_motion =
std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
DiscreteKeys discreteKeys{DiscreteKey(M(0), 2)};
HybridNonlinearFactor::Factors factors(
discreteKeys, {{zero_motion, 0.0}, {one_motion, 0.0}});
nfg.emplace_shared<PriorFactor<double>>(X(0), 0.0, noise_model);
nfg.emplace_shared<HybridNonlinearFactor>(KeyVector{X(0), X(1)}, discreteKeys,
factors);
nfg.emplace_shared<HybridNonlinearFactor>(
KeyVector{X(0), X(1)}, DiscreteKey(M(0), 2),
std::vector<NonlinearFactorValuePair>{{zero_motion, 0.0},
{one_motion, 0.0}});
DiscreteKey mode(M(0), 2);
nfg.emplace_shared<DiscreteDistribution>(mode, "1/1");

View File

@ -437,8 +437,8 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() {
std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
std::vector<NonlinearFactorValuePair> components = {{zero_motion, 0.0},
{one_motion, 0.0}};
nfg.emplace_shared<HybridNonlinearFactor>(KeyVector{X(0), X(1)},
DiscreteKeys{m}, components);
nfg.emplace_shared<HybridNonlinearFactor>(KeyVector{X(0), X(1)}, m,
components);
return nfg;
}
@ -583,9 +583,6 @@ TEST(HybridEstimation, ModeSelection) {
graph.emplace_shared<PriorFactor<double>>(X(0), 0.0, measurement_model);
graph.emplace_shared<PriorFactor<double>>(X(1), 0.0, measurement_model);
DiscreteKeys modes;
modes.emplace_back(M(0), 2);
// The size of the noise model
size_t d = 1;
double noise_tight = 0.5, noise_loose = 5.0;
@ -594,11 +591,11 @@ TEST(HybridEstimation, ModeSelection) {
X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_loose)),
model1 = std::make_shared<MotionModel>(
X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight));
std::vector<NonlinearFactorValuePair> components = {{model0, 0.0},
{model1, 0.0}};
KeyVector keys = {X(0), X(1)};
DiscreteKey modes(M(0), 2);
HybridNonlinearFactor mf(keys, modes, components);
initial.insert(X(0), 0.0);
@ -617,18 +614,22 @@ TEST(HybridEstimation, ModeSelection) {
/**************************************************************/
HybridBayesNet bn;
const DiscreteKey mode{M(0), 2};
const DiscreteKey mode(M(0), 2);
bn.push_back(
GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1));
bn.push_back(
GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));
std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
Z_1x1, noise_loose),
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
Z_1x1, noise_tight)};
bn.emplace_shared<HybridGaussianConditional>(
KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
std::vector{GaussianConditional::sharedMeanAndStddev(
Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_loose),
GaussianConditional::sharedMeanAndStddev(
Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_tight)});
HybridGaussianConditional::Conditionals(DiscreteKeys{mode},
conditionals));
VectorValues vv;
vv.insert(Z(0), Z_1x1);
@ -648,18 +649,22 @@ TEST(HybridEstimation, ModeSelection2) {
double noise_tight = 0.5, noise_loose = 5.0;
HybridBayesNet bn;
const DiscreteKey mode{M(0), 2};
const DiscreteKey mode(M(0), 2);
bn.push_back(
GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1));
bn.push_back(
GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1));
std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
Z_3x1, noise_loose),
GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
Z_3x1, noise_tight)};
bn.emplace_shared<HybridGaussianConditional>(
KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
std::vector{GaussianConditional::sharedMeanAndStddev(
Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_loose),
GaussianConditional::sharedMeanAndStddev(
Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_tight)});
HybridGaussianConditional::Conditionals(DiscreteKeys{mode},
conditionals));
VectorValues vv;
vv.insert(Z(0), Z_3x1);
@ -679,18 +684,15 @@ TEST(HybridEstimation, ModeSelection2) {
graph.emplace_shared<PriorFactor<Vector3>>(X(0), Z_3x1, measurement_model);
graph.emplace_shared<PriorFactor<Vector3>>(X(1), Z_3x1, measurement_model);
DiscreteKeys modes;
modes.emplace_back(M(0), 2);
auto model0 = std::make_shared<BetweenFactor<Vector3>>(
X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_loose)),
model1 = std::make_shared<BetweenFactor<Vector3>>(
X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight));
std::vector<NonlinearFactorValuePair> components = {{model0, 0.0},
{model1, 0.0}};
KeyVector keys = {X(0), X(1)};
DiscreteKey modes(M(0), 2);
HybridNonlinearFactor mf(keys, modes, components);
initial.insert<Vector3>(X(0), Z_3x1);

View File

@ -52,7 +52,9 @@ const std::vector<GaussianConditional::shared_ptr> conditionals{
commonSigma),
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
commonSigma)};
const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals);
const HybridGaussianConditional mixture(
{Z(0)}, {X(0)}, {mode},
HybridGaussianConditional::Conditionals({mode}, conditionals));
} // namespace equal_constants
/* ************************************************************************* */
@ -153,7 +155,9 @@ const std::vector<GaussianConditional::shared_ptr> conditionals{
0.5),
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
3.0)};
const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals);
const HybridGaussianConditional mixture(
{Z(0)}, {X(0)}, {mode},
HybridGaussianConditional::Conditionals({mode}, conditionals));
} // namespace mode_dependent_constants
/* ************************************************************************* */

View File

@ -233,8 +233,11 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1,
c1 = make_shared<GaussianConditional>(z, Vector1(mu1), I_1x1, model1);
HybridBayesNet hbn;
DiscreteKeys discreteParents{m};
hbn.emplace_shared<HybridGaussianConditional>(
KeyVector{z}, KeyVector{}, DiscreteKeys{m}, std::vector{c0, c1});
KeyVector{z}, KeyVector{}, discreteParents,
HybridGaussianConditional::Conditionals(discreteParents,
std::vector{c0, c1}));
auto mixing = make_shared<DiscreteConditional>(m, "50/50");
hbn.push_back(mixing);
@ -408,8 +411,11 @@ static HybridGaussianConditional::shared_ptr CreateHybridMotionModel(
-I_1x1, model0),
c1 = make_shared<GaussianConditional>(X(1), Vector1(mu1), I_1x1, X(0),
-I_1x1, model1);
DiscreteKeys discreteParents{m1};
return std::make_shared<HybridGaussianConditional>(
KeyVector{X(1)}, KeyVector{X(0)}, DiscreteKeys{m1}, std::vector{c0, c1});
KeyVector{X(1)}, KeyVector{X(0)}, discreteParents,
HybridGaussianConditional::Conditionals(discreteParents,
std::vector{c0, c1}));
}
/// Create two state Bayes network with 1 or two measurement models

View File

@ -181,7 +181,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
std::vector<GaussianFactorValuePair> factors = {
{std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
{std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0}};
hfg.add(HybridGaussianFactor({X(1)}, {{M(1), 2}}, factors));
hfg.add(HybridGaussianFactor({X(1)}, {M(1), 2}, factors));
hfg.add(DecisionTreeFactor(m1, {2, 8}));
// TODO(Varun) Adding extra discrete variable not connected to continuous
@ -241,7 +241,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
std::vector<GaussianFactorValuePair> factors = {
{std::make_shared<JacobianFactor>(X(0), I_3x3, Z_3x1), 0.0},
{std::make_shared<JacobianFactor>(X(0), I_3x3, Vector3::Ones()), 0.0}};
hfg.add(HybridGaussianFactor({X(0)}, {{M(0), 2}}, factors));
hfg.add(HybridGaussianFactor({X(0)}, {M(0), 2}, factors));
DecisionTree<Key, GaussianFactorValuePair> dt1(
M(1), {std::make_shared<JacobianFactor>(X(2), I_3x3, Z_3x1), 0.0},
@ -682,8 +682,11 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
x0, -I_1x1, model0),
c1 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
x0, -I_1x1, model1);
DiscreteKeys discreteParents{m1};
hbn.emplace_shared<HybridGaussianConditional>(
KeyVector{f01}, KeyVector{x0, x1}, DiscreteKeys{m1}, std::vector{c0, c1});
KeyVector{f01}, KeyVector{x0, x1}, discreteParents,
HybridGaussianConditional::Conditionals(discreteParents,
std::vector{c0, c1}));
// Discrete uniform prior.
hbn.emplace_shared<DiscreteConditional>(m1, "0.5/0.5");
@ -806,9 +809,11 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) {
X(0), Vector1(14.1421), I_1x1 * 2.82843),
conditional1 = std::make_shared<GaussianConditional>(
X(0), Vector1(10.1379), I_1x1 * 2.02759);
DiscreteKeys discreteParents{mode};
expectedBayesNet.emplace_shared<HybridGaussianConditional>(
KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
std::vector{conditional0, conditional1});
KeyVector{X(0)}, KeyVector{}, discreteParents,
HybridGaussianConditional::Conditionals(
discreteParents, std::vector{conditional0, conditional1}));
// Add prior on mode.
expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "74/26");
@ -831,12 +836,13 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
HybridBayesNet bn;
// Create Gaussian mixture z_0 = x0 + noise for each measurement.
std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3),
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 0.5)};
auto gm = std::make_shared<HybridGaussianConditional>(
KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode},
std::vector{
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3),
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1,
0.5)});
HybridGaussianConditional::Conditionals(DiscreteKeys{mode},
conditionals));
bn.push_back(gm);
// Create prior on X(0).
@ -865,7 +871,8 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
X(0), Vector1(14.1421), I_1x1 * 2.82843);
expectedBayesNet.emplace_shared<HybridGaussianConditional>(
KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
std::vector{conditional0, conditional1});
HybridGaussianConditional::Conditionals(
DiscreteKeys{mode}, std::vector{conditional0, conditional1}));
// Add prior on mode.
expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "1/1");
@ -902,7 +909,8 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) {
X(0), Vector1(10.274), I_1x1 * 2.0548);
expectedBayesNet.emplace_shared<HybridGaussianConditional>(
KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
std::vector{conditional0, conditional1});
HybridGaussianConditional::Conditionals(
DiscreteKeys{mode}, std::vector{conditional0, conditional1}));
// Add prior on mode.
expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "23/77");
@ -947,12 +955,14 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
for (size_t t : {0, 1, 2}) {
// Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t):
const auto noise_mode_t = DiscreteKey{N(t), 2};
std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1, 0.5),
GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1,
3.0)};
bn.emplace_shared<HybridGaussianConditional>(
KeyVector{Z(t)}, KeyVector{X(t)}, DiscreteKeys{noise_mode_t},
std::vector{GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t),
Z_1x1, 0.5),
GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t),
Z_1x1, 3.0)});
HybridGaussianConditional::Conditionals(DiscreteKeys{noise_mode_t},
conditionals));
// Create prior on discrete mode N(t):
bn.emplace_shared<DiscreteConditional>(noise_mode_t, "20/80");
@ -962,12 +972,15 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
for (size_t t : {2, 1}) {
// Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1):
const auto motion_model_t = DiscreteKey{M(t), 2};
std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), Z_1x1,
0.2),
GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), I_1x1,
0.2)};
auto gm = std::make_shared<HybridGaussianConditional>(
KeyVector{X(t)}, KeyVector{X(t - 1)}, DiscreteKeys{motion_model_t},
std::vector{GaussianConditional::sharedMeanAndStddev(
X(t), I_1x1, X(t - 1), Z_1x1, 0.2),
GaussianConditional::sharedMeanAndStddev(
X(t), I_1x1, X(t - 1), I_1x1, 0.2)});
HybridGaussianConditional::Conditionals(DiscreteKeys{motion_model_t},
conditionals));
bn.push_back(gm);
// Create prior on motion model M(t):

View File

@ -423,7 +423,7 @@ TEST(HybridGaussianISAM, NonTrivial) {
std::vector<std::pair<PlanarMotionModel::shared_ptr, double>> components = {
{moving, 0.0}, {still, 0.0}};
auto mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
contKeys, gtsam::DiscreteKey(M(1), 2), components);
fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor
@ -463,7 +463,7 @@ TEST(HybridGaussianISAM, NonTrivial) {
std::make_shared<PlanarMotionModel>(W(1), W(2), odometry, noise_model);
components = {{moving, 0.0}, {still, 0.0}};
mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components);
contKeys, gtsam::DiscreteKey(M(2), 2), components);
fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor
@ -506,7 +506,7 @@ TEST(HybridGaussianISAM, NonTrivial) {
std::make_shared<PlanarMotionModel>(W(2), W(3), odometry, noise_model);
components = {{moving, 0.0}, {still, 0.0}};
mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components);
contKeys, gtsam::DiscreteKey(M(3), 2), components);
fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor

View File

@ -135,7 +135,7 @@ TEST(HybridGaussianFactorGraph, Resize) {
std::vector<std::pair<MotionModel::shared_ptr, double>> components = {
{still, 0.0}, {moving, 0.0}};
auto dcFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
contKeys, gtsam::DiscreteKey(M(1), 2), components);
nhfg.push_back(dcFactor);
Values linearizationPoint;
@ -170,12 +170,12 @@ TEST(HybridGaussianFactorGraph, HybridNonlinearFactor) {
// Check for exception when number of continuous keys are under-specified.
KeyVector contKeys = {X(0)};
THROWS_EXCEPTION(std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components));
contKeys, gtsam::DiscreteKey(M(1), 2), components));
// Check for exception when number of continuous keys are too many.
contKeys = {X(0), X(1), X(2)};
THROWS_EXCEPTION(std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components));
contKeys, gtsam::DiscreteKey(M(1), 2), components));
}
/*****************************************************************************
@ -807,7 +807,7 @@ TEST(HybridFactorGraph, DefaultDecisionTree) {
std::vector<std::pair<PlanarMotionModel::shared_ptr, double>> motion_models =
{{still, 0.0}, {moving, 0.0}};
fg.emplace_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, motion_models);
contKeys, gtsam::DiscreteKey(M(1), 2), motion_models);
// Add Range-Bearing measurements to from X0 to L0 and X1 to L1.
// create a noise model for the landmark measurements

View File

@ -442,7 +442,7 @@ TEST(HybridNonlinearISAM, NonTrivial) {
std::vector<std::pair<PlanarMotionModel::shared_ptr, double>> components = {
{moving, 0.0}, {still, 0.0}};
auto mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
contKeys, gtsam::DiscreteKey(M(1), 2), components);
fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor
@ -482,7 +482,7 @@ TEST(HybridNonlinearISAM, NonTrivial) {
std::make_shared<PlanarMotionModel>(W(1), W(2), odometry, noise_model);
components = {{moving, 0.0}, {still, 0.0}};
mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components);
contKeys, gtsam::DiscreteKey(M(2), 2), components);
fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor
@ -525,7 +525,7 @@ TEST(HybridNonlinearISAM, NonTrivial) {
std::make_shared<PlanarMotionModel>(W(2), W(3), odometry, noise_model);
components = {{moving, 0.0}, {still, 0.0}};
mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components);
contKeys, gtsam::DiscreteKey(M(3), 2), components);
fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor

View File

@ -76,7 +76,7 @@ BOOST_CLASS_EXPORT_GUID(HybridBayesNet, "gtsam_HybridBayesNet");
// Test HybridGaussianFactor serialization.
TEST(HybridSerialization, HybridGaussianFactor) {
KeyVector continuousKeys{X(0)};
DiscreteKeys discreteKeys{{M(0), 2}};
DiscreteKey discreteKey{M(0), 2};
auto A = Matrix::Zero(2, 1);
auto b0 = Matrix::Zero(2, 1);
@ -85,7 +85,7 @@ TEST(HybridSerialization, HybridGaussianFactor) {
auto f1 = std::make_shared<JacobianFactor>(X(0), A, b1);
std::vector<GaussianFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};
const HybridGaussianFactor factor(continuousKeys, discreteKeys, factors);
const HybridGaussianFactor factor(continuousKeys, discreteKey, factors);
EXPECT(equalsObj<HybridGaussianFactor>(factor));
EXPECT(equalsXML<HybridGaussianFactor>(factor));
@ -116,7 +116,8 @@ TEST(HybridSerialization, HybridGaussianConditional) {
const auto conditional1 = std::make_shared<GaussianConditional>(
GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3));
const HybridGaussianConditional gm({Z(0)}, {X(0)}, {mode},
{conditional0, conditional1});
HybridGaussianConditional::Conditionals(
{mode}, {conditional0, conditional1}));
EXPECT(equalsObj<HybridGaussianConditional>(gm));
EXPECT(equalsXML<HybridGaussianConditional>(gm));

View File

@ -714,6 +714,9 @@ double ComputeLogNormalizer(
const noiseModel::Gaussian::shared_ptr& noise_model) {
// Since noise models are Gaussian, we can get the logDeterminant using
// the same trick as in GaussianConditional
// Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1}
// log det(Sigma) = -log(det(R'R)) = -2*log(det(R))
// Hence, log det(Sigma)) = -2.0 * logDetR()
double logDetR = noise_model->R()
.diagonal()
.unaryExpr([](double x) { return log(x); })

View File

@ -752,7 +752,7 @@ namespace gtsam {
template<> struct traits<noiseModel::Unit> : public Testable<noiseModel::Unit> {};
/**
* @brief Helper function to compute the sqrt(|2πΣ|) normalizer values
* @brief Helper function to compute the log(|2πΣ|) normalizer values
* for a Gaussian noise model.
* We compute this in the log-space for numerical accuracy.
*

View File

@ -807,6 +807,26 @@ TEST(NoiseModel, NonDiagonalGaussian)
}
}
TEST(NoiseModel, ComputeLogNormalizer) {
// Very simple 1D noise model, which we can compute by hand.
double sigma = 0.1;
auto noise_model = Isotropic::Sigma(1, sigma);
double actual_value = ComputeLogNormalizer(noise_model);
// Compute log(|2πΣ|) by hand.
// = log(2π) + log(Σ) (since it is 1D)
constexpr double log2pi = 1.8378770664093454835606594728112;
double expected_value = log2pi + log(sigma * sigma);
EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
// Similar situation in the 3D case
size_t n = 3;
auto noise_model2 = Isotropic::Sigma(n, sigma);
double actual_value2 = ComputeLogNormalizer(noise_model2);
// We multiply by 3 due to the determinant
double expected_value2 = n * (log2pi + log(sigma * sigma));
EXPECT_DOUBLES_EQUAL(expected_value2, actual_value2, 1e-9);
}
/* ************************************************************************* */
int main() { TestResult tr; return TestRegistry::runAllTests(tr); }
/* ************************************************************************* */

View File

@ -43,14 +43,12 @@ class TestHybridBayesNet(GtsamTestCase):
# Create the conditionals
conditional0 = GaussianConditional(X(1), [5], I_1x1, model0)
conditional1 = GaussianConditional(X(1), [2], I_1x1, model1)
discrete_keys = DiscreteKeys()
discrete_keys.push_back(Asia)
# Create hybrid Bayes net.
bayesNet = HybridBayesNet()
bayesNet.push_back(conditional)
bayesNet.push_back(
HybridGaussianConditional([X(1)], [], discrete_keys,
HybridGaussianConditional([X(1)], [], Asia,
[conditional0, conditional1]))
bayesNet.push_back(DiscreteConditional(Asia, "99/1"))

View File

@ -20,7 +20,7 @@ import gtsam
from gtsam import (DiscreteConditional, DiscreteKeys, GaussianConditional,
HybridBayesNet, HybridGaussianConditional,
HybridGaussianFactor, HybridGaussianFactorGraph,
HybridValues, JacobianFactor, Ordering, noiseModel)
HybridValues, JacobianFactor, noiseModel)
DEBUG_MARGINALS = False
@ -31,13 +31,11 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
def test_create(self):
"""Test construction of hybrid factor graph."""
model = noiseModel.Unit.Create(3)
dk = DiscreteKeys()
dk.push_back((C(0), 2))
jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model)
jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model)
gmf = HybridGaussianFactor([X(0)], dk, [(jf1, 0), (jf2, 0)])
gmf = HybridGaussianFactor([X(0)], (C(0), 2), [(jf1, 0), (jf2, 0)])
hfg = HybridGaussianFactorGraph()
hfg.push_back(jf1)
@ -58,13 +56,11 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
def test_optimize(self):
"""Test construction of hybrid factor graph."""
model = noiseModel.Unit.Create(3)
dk = DiscreteKeys()
dk.push_back((C(0), 2))
jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model)
jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model)
gmf = HybridGaussianFactor([X(0)], dk, [(jf1, 0), (jf2, 0)])
gmf = HybridGaussianFactor([X(0)], (C(0), 2), [(jf1, 0), (jf2, 0)])
hfg = HybridGaussianFactorGraph()
hfg.push_back(jf1)
@ -96,8 +92,6 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
# Create Gaussian mixture Z(0) = X(0) + noise for each measurement.
I_1x1 = np.eye(1)
keys = DiscreteKeys()
keys.push_back(mode)
for i in range(num_measurements):
conditional0 = GaussianConditional.FromMeanAndStddev(Z(i),
I_1x1,
@ -108,7 +102,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
X(0), [0],
sigma=3)
bayesNet.push_back(
HybridGaussianConditional([Z(i)], [X(0)], keys,
HybridGaussianConditional([Z(i)], [X(0)], mode,
[conditional0, conditional1]))
# Create prior on X(0).

View File

@ -27,8 +27,6 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
def test_nonlinear_hybrid(self):
nlfg = gtsam.HybridNonlinearFactorGraph()
dk = gtsam.DiscreteKeys()
dk.push_back((10, 2))
nlfg.push_back(
BetweenFactorPoint3(1, 2, Point3(1, 2, 3),
noiseModel.Diagonal.Variances([1, 1, 1])))
@ -40,7 +38,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
noiseModel.Unit.Create(3)), 0.0),
(PriorFactorPoint3(1, Point3(1, 2, 1),
noiseModel.Unit.Create(3)), 0.0)]
nlfg.push_back(gtsam.HybridNonlinearFactor([1], dk, factors))
nlfg.push_back(gtsam.HybridNonlinearFactor([1], (10, 2), factors))
nlfg.push_back(gtsam.DecisionTreeFactor((10, 2), "1 3"))
values = gtsam.Values()
values.insert_point3(1, Point3(0, 0, 0))