Merge pull request #1863 from borglab/feature/no_hiding

release/4.3a0
Varun Agrawal 2024-10-10 09:31:44 -04:00 committed by GitHub
commit def8b2526f
26 changed files with 1434 additions and 823 deletions

View File

@ -1,8 +1 @@
BasedOnStyle: Google
BinPackArguments: false
BinPackParameters: false
ColumnLimit: 100
DerivePointerAlignment: false
IncludeBlocks: Preserve
PointerAlignment: Left
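For reference, a short sketch of what the options removed here used to enforce (the function below is hypothetical, for illustration only):

// With PointerAlignment: Left, BinPackParameters: false, and ColumnLimit: 100,
// pointers/references bind to the type, and parameters that do not fit on one
// line are each given their own line:
void processHybridFactor(const GaussianFactor::shared_ptr& factor,
                         const DiscreteKeys& discreteKeys,
                         bool verbose = false);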

View File

@ -22,14 +22,13 @@
#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridConditional.h>
#include <gtsam/inference/BayesTree-inst.h>
#include <gtsam/inference/BayesTreeCliqueBase-inst.h>
#include <gtsam/linear/GaussianJunctionTree.h>
#include <memory>
#include "gtsam/hybrid/HybridConditional.h"
namespace gtsam {
// Instantiate base class

View File

@ -13,6 +13,7 @@
* @file HybridConditional.cpp
* @date Mar 11, 2022
* @author Fan Jiang
* @author Varun Agrawal
*/
#include <gtsam/hybrid/HybridConditional.h>

View File

@ -13,6 +13,7 @@
* @file HybridConditional.h
* @date Mar 11, 2022
* @author Fan Jiang
* @author Varun Agrawal
*/
#pragma once

View File

@ -32,9 +32,6 @@ namespace gtsam {
class HybridValues;
/// Alias for DecisionTree of GaussianFactorGraphs
using GaussianFactorGraphTree = DecisionTree<Key, GaussianFactorGraph>;
KeyVector CollectKeys(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys);
KeyVector CollectKeys(const KeyVector &keys1, const KeyVector &keys2);

View File

@ -32,6 +32,16 @@
namespace gtsam {
/* *******************************************************************************/
/**
* @brief Helper struct for constructing HybridGaussianConditional objects
*
* This struct contains the following fields:
* - nrFrontals: Optional size_t for number of frontal variables
* - pairs: FactorValuePairs for storing conditionals with their negLogConstant
* - conditionals: Conditionals for storing conditionals. TODO(frank): kill!
* - minNegLogConstant: minimum negLogConstant, computed here, subtracted in
* constructor
*/
struct HybridGaussianConditional::Helper {
std::optional<size_t> nrFrontals;
FactorValuePairs pairs;
@ -68,16 +78,12 @@ struct HybridGaussianConditional::Helper {
explicit Helper(const Conditionals &conditionals)
: conditionals(conditionals),
minNegLogConstant(std::numeric_limits<double>::infinity()) {
auto func = [this](const GC::shared_ptr &c) -> GaussianFactorValuePair {
double value = 0.0;
if (c) {
if (!nrFrontals.has_value()) {
nrFrontals = c->nrFrontals();
}
value = c->negLogConstant();
auto func = [this](const GC::shared_ptr &gc) -> GaussianFactorValuePair {
if (!gc) return {nullptr, std::numeric_limits<double>::infinity()};
if (!nrFrontals) nrFrontals = gc->nrFrontals();
double value = gc->negLogConstant();
minNegLogConstant = std::min(minNegLogConstant, value);
}
return {std::dynamic_pointer_cast<GaussianFactor>(c), value};
return {gc, value};
};
pairs = FactorValuePairs(conditionals, func);
if (!nrFrontals.has_value()) {
@ -91,7 +97,14 @@ struct HybridGaussianConditional::Helper {
/* *******************************************************************************/
HybridGaussianConditional::HybridGaussianConditional(
const DiscreteKeys &discreteParents, const Helper &helper)
: BaseFactor(discreteParents, helper.pairs),
: BaseFactor(discreteParents,
FactorValuePairs(helper.pairs,
[&](const GaussianFactorValuePair &
pair) { // subtract minNegLogConstant
return GaussianFactorValuePair{
pair.first,
pair.second - helper.minNegLogConstant};
})),
BaseConditional(*helper.nrFrontals),
conditionals_(helper.conditionals),
negLogConstant_(helper.minNegLogConstant) {}
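A minimal sketch of the normalization performed above, with hypothetical numbers: the helper finds the minimum negLogConstant, the pairs handed to the base factor keep only each mode's excess over that minimum, and the minimum itself is stored as negLogConstant_.

#include <algorithm>
#include <vector>

// Two hypothetical modes with negLogConstant 1.3 and 0.9:
std::vector<double> negLogConstants{1.3, 0.9};
const double minNegLogConstant =
    *std::min_element(negLogConstants.begin(), negLogConstants.end());  // 0.9
// The stored scalars become {0.4, 0.0}; the subtracted 0.9 is kept on the
// conditional as negLogConstant_, so the overall model is unchanged.
for (double& c : negLogConstants) c -= minNegLogConstant;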
@ -135,29 +148,6 @@ HybridGaussianConditional::conditionals() const {
return conditionals_;
}
/* *******************************************************************************/
GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree()
const {
auto wrap = [this](const GaussianConditional::shared_ptr &gc) {
// First check that the conditional has not been pruned
if (gc) {
const double Cgm_Kgcm = gc->negLogConstant() - this->negLogConstant_;
// If there is a difference in the covariances, we need to account for
// that since the error is dependent on the mode.
if (Cgm_Kgcm > 0.0) {
// We add a constant factor which will be used when computing
// the probability of the discrete variables.
Vector c(1);
c << std::sqrt(2.0 * Cgm_Kgcm);
auto constantFactor = std::make_shared<JacobianFactor>(c);
return GaussianFactorGraph{gc, constantFactor};
}
}
return GaussianFactorGraph{gc};
};
return {conditionals_, wrap};
}
/* *******************************************************************************/
size_t HybridGaussianConditional::nrComponents() const {
size_t total = 0;
@ -192,9 +182,11 @@ bool HybridGaussianConditional::equals(const HybridFactor &lf,
// Check the base and the factors:
return BaseFactor::equals(*e, tol) &&
conditionals_.equals(
e->conditionals_, [tol](const auto &f1, const auto &f2) {
return (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol));
conditionals_.equals(e->conditionals_,
[tol](const GaussianConditional::shared_ptr &f1,
const GaussianConditional::shared_ptr &f2) {
return (!f1 && !f2) ||
(f1 && f2 && f1->equals(*f2, tol));
});
}
@ -202,9 +194,6 @@ bool HybridGaussianConditional::equals(const HybridFactor &lf,
void HybridGaussianConditional::print(const std::string &s,
const KeyFormatter &formatter) const {
std::cout << (s.empty() ? "" : s + "\n");
if (isContinuous()) std::cout << "Continuous ";
if (isDiscrete()) std::cout << "Discrete ";
if (isHybrid()) std::cout << "Hybrid ";
BaseConditional::print("", formatter);
std::cout << " Discrete Keys = ";
for (auto &dk : discreteKeys()) {
@ -270,13 +259,7 @@ std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(
-> GaussianFactorValuePair {
const auto likelihood_m = conditional->likelihood(given);
const double Cgm_Kgcm = conditional->negLogConstant() - negLogConstant_;
if (Cgm_Kgcm == 0.0) {
return {likelihood_m, 0.0};
} else {
// Add a constant to the likelihood in case the noise models
// are not all equal.
return {likelihood_m, Cgm_Kgcm};
}
});
return std::make_shared<HybridGaussianFactor>(discreteParentKeys,
likelihoods);
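A hedged sketch of when Cgm_Kgcm is nonzero, assuming GTSAM's GaussianConditional::sharedMeanAndStddev factory and symbol_shorthand keys X/Z: two modes differing only in measurement sigma have normalization constants that differ by the log of the sigma ratio, and that difference becomes the scalar attached to the wider mode's likelihood.

// Hypothetical two-mode measurement p(z0 | x0, m): sigma 1 (m=0), sigma 2 (m=1).
auto c0 = GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0),
                                                   Vector1(0.0), 1.0);
auto c1 = GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0),
                                                   Vector1(0.0), 2.0);
const double Cgm_Kgcm = c1->negLogConstant() - c0->negLogConstant();  // = log(2)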

View File

@ -231,9 +231,6 @@ class GTSAM_EXPORT HybridGaussianConditional
HybridGaussianConditional(const DiscreteKeys &discreteParents,
const Helper &helper);
/// Convert to a DecisionTree of Gaussian factor graphs.
GaussianFactorGraphTree asGaussianFactorGraphTree() const;
/// Check whether `given` has values for all frontal keys.
bool allFrontalsGiven(const VectorValues &given) const;

View File

@ -18,83 +18,52 @@
* @date Mar 12, 2022
*/
#include <gtsam/base/types.h>
#include <gtsam/base/utilities.h>
#include <gtsam/discrete/DecisionTree-inl.h>
#include <gtsam/discrete/DecisionTree.h>
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianProductFactor.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/linear/GaussianFactor.h>
#include <gtsam/linear/GaussianFactorGraph.h>
namespace gtsam {
/* *******************************************************************************/
HybridGaussianFactor::Factors HybridGaussianFactor::augment(
const FactorValuePairs &factors) {
// Find the minimum value so we can shift all values to be non-negative,
// since we cannot take the square root of a negative number.
Factors gaussianFactors;
AlgebraicDecisionTree<Key> valueTree;
std::tie(gaussianFactors, valueTree) = unzip(factors);
// Compute minimum value for normalization.
double min_value = valueTree.min();
// Finally, update the [A|b] matrices.
auto update = [&min_value](const GaussianFactorValuePair &gfv) {
auto [gf, value] = gfv;
auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
if (!jf) return gf;
double normalized_value = value - min_value;
// If the value is 0, do nothing
if (normalized_value == 0.0) return gf;
GaussianFactorGraph gfg;
gfg.push_back(jf);
Vector c(1);
// When hiding c inside the `b` vector, value == 0.5*c^2
c << std::sqrt(2.0 * normalized_value);
auto constantFactor = std::make_shared<JacobianFactor>(c);
gfg.push_back(constantFactor);
return std::dynamic_pointer_cast<GaussianFactor>(
std::make_shared<JacobianFactor>(gfg));
};
return Factors(factors, update);
}
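A sanity-check sketch of the "hide the value in b" trick used in augment above: a Jacobian factor with no keys and b = sqrt(2v) contributes error 0.5*||b||^2 = v for every continuous assignment.

const double v = 0.3;  // hypothetical non-negative scalar to hide
Vector c(1);
c << std::sqrt(2.0 * v);
auto constantFactor = std::make_shared<JacobianFactor>(c);
// No keys, so an empty VectorValues suffices:
// constantFactor->error(VectorValues()) == 0.5 * (sqrt(2v))^2 == v.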
/* *******************************************************************************/
struct HybridGaussianFactor::ConstructorHelper {
KeyVector continuousKeys; // Continuous keys extracted from factors
DiscreteKeys discreteKeys; // Discrete keys provided to the constructors
FactorValuePairs pairs; // Used only if factorsTree is empty
Factors factorsTree;
FactorValuePairs pairs; // The decision tree with factors and scalars
ConstructorHelper(const DiscreteKey &discreteKey,
const std::vector<GaussianFactor::shared_ptr> &factors)
/// Constructor for a single discrete key and a vector of Gaussian factors
ConstructorHelper(const DiscreteKey& discreteKey,
const std::vector<GaussianFactor::shared_ptr>& factors)
: discreteKeys({discreteKey}) {
// Extract continuous keys from the first non-null factor
for (const auto &factor : factors) {
for (const auto& factor : factors) {
if (factor && continuousKeys.empty()) {
continuousKeys = factor->keys();
break;
}
}
// Build the DecisionTree from the factor vector
factorsTree = Factors(discreteKeys, factors);
// Build the FactorValuePairs DecisionTree
pairs = FactorValuePairs(
DecisionTree<Key, GaussianFactor::shared_ptr>(discreteKeys, factors),
[](const sharedFactor& f) {
return std::pair{f,
f ? 0.0 : std::numeric_limits<double>::infinity()};
});
}
ConstructorHelper(const DiscreteKey &discreteKey,
const std::vector<GaussianFactorValuePair> &factorPairs)
/// Constructor for a single discrete key and a vector of
/// GaussianFactorValuePairs
ConstructorHelper(const DiscreteKey& discreteKey,
const std::vector<GaussianFactorValuePair>& factorPairs)
: discreteKeys({discreteKey}) {
// Extract continuous keys from the first non-null factor
for (const auto &pair : factorPairs) {
for (const GaussianFactorValuePair& pair : factorPairs) {
if (pair.first && continuousKeys.empty()) {
continuousKeys = pair.first->keys();
break;
@ -105,11 +74,14 @@ struct HybridGaussianFactor::ConstructorHelper {
pairs = FactorValuePairs(discreteKeys, factorPairs);
}
ConstructorHelper(const DiscreteKeys &discreteKeys,
const FactorValuePairs &factorPairs)
/// Constructor for a vector of discrete keys and a vector of
/// GaussianFactorValuePairs
ConstructorHelper(const DiscreteKeys& discreteKeys,
const FactorValuePairs& factorPairs)
: discreteKeys(discreteKeys) {
// Extract continuous keys from the first non-null factor
factorPairs.visit([&](const GaussianFactorValuePair &pair) {
// TODO: just stop after first non-null factor
factorPairs.visit([&](const GaussianFactorValuePair& pair) {
if (pair.first && continuousKeys.empty()) {
continuousKeys = pair.first->keys();
}
@ -121,31 +93,27 @@ struct HybridGaussianFactor::ConstructorHelper {
};
/* *******************************************************************************/
HybridGaussianFactor::HybridGaussianFactor(const ConstructorHelper &helper)
HybridGaussianFactor::HybridGaussianFactor(const ConstructorHelper& helper)
: Base(helper.continuousKeys, helper.discreteKeys),
factors_(helper.factorsTree.empty() ? augment(helper.pairs)
: helper.factorsTree) {}
factors_(helper.pairs) {}
/* *******************************************************************************/
HybridGaussianFactor::HybridGaussianFactor(
const DiscreteKey &discreteKey,
const std::vector<GaussianFactor::shared_ptr> &factors)
const DiscreteKey& discreteKey,
const std::vector<GaussianFactor::shared_ptr>& factors)
: HybridGaussianFactor(ConstructorHelper(discreteKey, factors)) {}
/* *******************************************************************************/
HybridGaussianFactor::HybridGaussianFactor(
const DiscreteKey &discreteKey,
const std::vector<GaussianFactorValuePair> &factorPairs)
const DiscreteKey& discreteKey,
const std::vector<GaussianFactorValuePair>& factorPairs)
: HybridGaussianFactor(ConstructorHelper(discreteKey, factorPairs)) {}
/* *******************************************************************************/
HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys &discreteKeys,
const FactorValuePairs &factors)
HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys& discreteKeys,
const FactorValuePairs& factors)
: HybridGaussianFactor(ConstructorHelper(discreteKeys, factors)) {}
/* *******************************************************************************/
bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const {
const This *e = dynamic_cast<const This *>(&lf);
bool HybridGaussianFactor::equals(const HybridFactor& lf, double tol) const {
const This* e = dynamic_cast<const This*>(&lf);
if (e == nullptr) return false;
// This will return false if either factors_ is empty or e->factors_ is
// empty, but not if both are empty or both are non-empty:
@ -153,17 +121,19 @@ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const {
if (factors_.empty() ^ e->factors_.empty()) return false;
// Check the base and the factors:
return Base::equals(*e, tol) &&
factors_.equals(e->factors_, [tol](const auto &f1, const auto &f2) {
return (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol));
});
auto compareFunc = [tol](const GaussianFactorValuePair& pair1,
const GaussianFactorValuePair& pair2) {
auto f1 = pair1.first, f2 = pair2.first;
bool match = (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol));
return match && gtsam::equal(pair1.second, pair2.second, tol);
};
return Base::equals(*e, tol) && factors_.equals(e->factors_, compareFunc);
}
/* *******************************************************************************/
void HybridGaussianFactor::print(const std::string &s,
const KeyFormatter &formatter) const {
void HybridGaussianFactor::print(const std::string& s,
const KeyFormatter& formatter) const {
std::cout << (s.empty() ? "" : s + "\n");
std::cout << "HybridGaussianFactor" << std::endl;
HybridFactor::print("", formatter);
std::cout << "{\n";
if (factors_.empty()) {
@ -171,11 +141,12 @@ void HybridGaussianFactor::print(const std::string &s,
} else {
factors_.print(
"", [&](Key k) { return formatter(k); },
[&](const sharedFactor &gf) -> std::string {
[&](const GaussianFactorValuePair& pair) -> std::string {
RedirectCout rd;
std::cout << ":\n";
if (gf) {
gf->print("", formatter);
if (pair.first) {
pair.first->print("", formatter);
std::cout << "scalar: " << pair.second << "\n";
return rd.str();
} else {
return "nullptr";
@ -186,61 +157,47 @@ void HybridGaussianFactor::print(const std::string &s,
}
/* *******************************************************************************/
HybridGaussianFactor::sharedFactor HybridGaussianFactor::operator()(
const DiscreteValues &assignment) const {
GaussianFactorValuePair HybridGaussianFactor::operator()(
const DiscreteValues& assignment) const {
return factors_(assignment);
}
/* *******************************************************************************/
GaussianFactorGraphTree HybridGaussianFactor::add(
const GaussianFactorGraphTree &sum) const {
using Y = GaussianFactorGraph;
auto add = [](const Y &graph1, const Y &graph2) {
auto result = graph1;
result.push_back(graph2);
return result;
};
const auto tree = asGaussianFactorGraphTree();
return sum.empty() ? tree : sum.apply(tree, add);
HybridGaussianProductFactor HybridGaussianFactor::asProductFactor() const {
// Implemented by creating a new DecisionTree where:
// - The structure (keys and assignments) is preserved from factors_
// - Each leaf is converted to a GaussianFactorGraph with just that factor
//   and its scalar.
return {{factors_,
[](const GaussianFactorValuePair& pair)
-> std::pair<GaussianFactorGraph, double> {
return {GaussianFactorGraph{pair.first}, pair.second};
}}};
}
/* *******************************************************************************/
GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree()
const {
auto wrap = [](const sharedFactor &gf) { return GaussianFactorGraph{gf}; };
return {factors_, wrap};
}
/* *******************************************************************************/
/// Helper method to compute the error of a component.
static double PotentiallyPrunedComponentError(
const GaussianFactor::shared_ptr &gf, const VectorValues &values) {
// Check if valid pointer
if (gf) {
return gf->error(values);
} else {
// If nullptr, this component was pruned, so we return maximum error. This
// way the negative exponential will give a probability value close to 0.0.
return std::numeric_limits<double>::max();
}
inline static double PotentiallyPrunedComponentError(
const GaussianFactorValuePair& pair, const VectorValues& continuousValues) {
return pair.first ? pair.first->error(continuousValues) + pair.second
: std::numeric_limits<double>::infinity();
}
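A small sketch of the pruned-component behavior above: a nullptr factor yields infinite error, so exp(-error) downstream assigns that mode zero weight.

GaussianFactorValuePair pruned{nullptr, 0.0};  // a leaf removed by pruning
const double e = PotentiallyPrunedComponentError(pruned, VectorValues());
// e == +infinity, hence std::exp(-e) == 0.0: the mode drops out entirely.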
/* *******************************************************************************/
AlgebraicDecisionTree<Key> HybridGaussianFactor::errorTree(
const VectorValues &continuousValues) const {
const VectorValues& continuousValues) const {
// Functor to convert a GaussianFactorValuePair to a double error value.
auto errorFunc = [&continuousValues](const sharedFactor &gf) {
return PotentiallyPrunedComponentError(gf, continuousValues);
auto errorFunc = [&continuousValues](const GaussianFactorValuePair& pair) {
return PotentiallyPrunedComponentError(pair, continuousValues);
};
DecisionTree<Key, double> error_tree(factors_, errorFunc);
return error_tree;
}
/* *******************************************************************************/
double HybridGaussianFactor::error(const HybridValues &values) const {
double HybridGaussianFactor::error(const HybridValues& values) const {
// Directly index to get the component, no need to build the whole tree.
const sharedFactor gf = factors_(values.discrete());
return PotentiallyPrunedComponentError(gf, values.continuous());
const GaussianFactorValuePair pair = factors_(values.discrete());
return PotentiallyPrunedComponentError(pair, values.continuous());
}
} // namespace gtsam

View File

@ -24,6 +24,7 @@
#include <gtsam/discrete/DecisionTree.h>
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridGaussianProductFactor.h>
#include <gtsam/linear/GaussianFactor.h>
#include <gtsam/linear/GaussianFactorGraph.h>
@ -66,12 +67,10 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
/// typedef for Decision Tree of Gaussian factors and arbitrary value.
using FactorValuePairs = DecisionTree<Key, GaussianFactorValuePair>;
/// typedef for Decision Tree of Gaussian factors.
using Factors = DecisionTree<Key, sharedFactor>;
private:
/// Decision tree of Gaussian factors indexed by discrete keys.
Factors factors_;
FactorValuePairs factors_;
public:
/// @name Constructors
@ -110,10 +109,10 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
* The value ϕ(x,M) for the factor is again ϕ_m(x) + E_m.
*
* @param discreteKeys Discrete variables and their cardinalities.
* @param factors The decision tree of Gaussian factor/scalar pairs.
* @param factorPairs The decision tree of Gaussian factor/scalar pairs.
*/
HybridGaussianFactor(const DiscreteKeys &discreteKeys,
const FactorValuePairs &factors);
const FactorValuePairs &factorPairs);
/// @}
/// @name Testable
@ -129,17 +128,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
/// @{
/// Get factor at a given discrete assignment.
sharedFactor operator()(const DiscreteValues &assignment) const;
/**
* @brief Combine the Gaussian Factor Graphs in `sum` and `this` while
* maintaining the original tree structure.
*
* @param sum Decision Tree of Gaussian Factor Graphs indexed by the
* variables.
* @return Sum
*/
GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const;
GaussianFactorValuePair operator()(const DiscreteValues &assignment) const;
/**
* @brief Compute error of the HybridGaussianFactor as a tree.
@ -158,24 +147,16 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
double error(const HybridValues &values) const override;
/// Getter for GaussianFactor decision tree
const Factors &factors() const { return factors_; }
/// Add HybridNonlinearFactor to a Sum, syntactic sugar.
friend GaussianFactorGraphTree &operator+=(
GaussianFactorGraphTree &sum, const HybridGaussianFactor &factor) {
sum = factor.add(sum);
return sum;
}
/// @}
protected:
const FactorValuePairs &factors() const { return factors_; }
/**
* @brief Helper function to return factors and functional to create a
* DecisionTree of Gaussian Factor Graphs.
*
* @return GaussianFactorGraphTree
* @return HybridGaussianProductFactor
*/
GaussianFactorGraphTree asGaussianFactorGraphTree() const;
virtual HybridGaussianProductFactor asProductFactor() const;
/// @}
private:
/**
@ -184,10 +165,9 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
* value in the `b` vector as an additional row.
*
* @param factors DecisionTree of GaussianFactors and arbitrary scalars.
* Gaussian factor in factors.
* @return HybridGaussianFactor::Factors
* @return FactorValuePairs
*/
static Factors augment(const FactorValuePairs &factors);
static FactorValuePairs augment(const FactorValuePairs &factors);
/// Helper struct to assist private constructor below.
struct ConstructorHelper;

View File

@ -24,6 +24,7 @@
#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/discrete/DiscreteJunctionTree.h>
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/HybridConditional.h>
#include <gtsam/hybrid/HybridEliminationTree.h>
#include <gtsam/hybrid/HybridFactor.h>
@ -40,7 +41,6 @@
#include <gtsam/linear/HessianFactor.h>
#include <gtsam/linear/JacobianFactor.h>
#include <algorithm>
#include <cstddef>
#include <iostream>
#include <memory>
@ -48,14 +48,21 @@
#include <utility>
#include <vector>
#include "gtsam/discrete/DecisionTreeFactor.h"
namespace gtsam {
/// Specialize EliminateableFactorGraph for HybridGaussianFactorGraph:
template class EliminateableFactorGraph<HybridGaussianFactorGraph>;
using OrphanWrapper = BayesTreeOrphanWrapper<HybridBayesTree::Clique>;
using std::dynamic_pointer_cast;
using OrphanWrapper = BayesTreeOrphanWrapper<HybridBayesTree::Clique>;
using Result =
std::pair<std::shared_ptr<GaussianConditional>, GaussianFactor::shared_ptr>;
using ResultValuePair = std::pair<Result, double>;
using ResultTree = DecisionTree<Key, ResultValuePair>;
static const VectorValues kEmpty;
/* ************************************************************************ */
// Throw a runtime exception for method specified in string s, and factor f:
@ -78,28 +85,57 @@ const Ordering HybridOrdering(const HybridGaussianFactorGraph &graph) {
static void printFactor(const std::shared_ptr<Factor> &factor,
const DiscreteValues &assignment,
const KeyFormatter &keyFormatter) {
if (auto hgf = std::dynamic_pointer_cast<HybridGaussianFactor>(factor)) {
if (auto hgf = dynamic_pointer_cast<HybridGaussianFactor>(factor)) {
if (assignment.empty()) {
hgf->print("HybridGaussianFactor:", keyFormatter);
} else {
hgf->operator()(assignment)
->print("HybridGaussianFactor, component:", keyFormatter);
} else if (auto gf = std::dynamic_pointer_cast<GaussianFactor>(factor)) {
.first->print("HybridGaussianFactor, component:", keyFormatter);
}
} else if (auto gf = dynamic_pointer_cast<GaussianFactor>(factor)) {
factor->print("GaussianFactor:\n", keyFormatter);
} else if (auto df = std::dynamic_pointer_cast<DiscreteFactor>(factor)) {
} else if (auto df = dynamic_pointer_cast<DiscreteFactor>(factor)) {
factor->print("DiscreteFactor:\n", keyFormatter);
} else if (auto hc = std::dynamic_pointer_cast<HybridConditional>(factor)) {
} else if (auto hc = dynamic_pointer_cast<HybridConditional>(factor)) {
if (hc->isContinuous()) {
factor->print("GaussianConditional:\n", keyFormatter);
} else if (hc->isDiscrete()) {
factor->print("DiscreteConditional:\n", keyFormatter);
} else {
if (assignment.empty()) {
hc->print("HybridConditional:", keyFormatter);
} else {
hc->asHybrid()
->choose(assignment)
->print("HybridConditional, component:\n", keyFormatter);
}
}
} else {
factor->print("Unknown factor type\n", keyFormatter);
}
}
/* ************************************************************************ */
void HybridGaussianFactorGraph::print(const std::string &s,
const KeyFormatter &keyFormatter) const {
std::cout << (s.empty() ? "" : s + " ") << std::endl;
std::cout << "size: " << size() << std::endl;
for (size_t i = 0; i < factors_.size(); i++) {
auto &&factor = factors_[i];
if (factor == nullptr) {
std::cout << "Factor " << i << ": nullptr\n";
continue;
}
// Print the factor
std::cout << "Factor " << i << "\n";
printFactor(factor, {}, keyFormatter);
std::cout << "\n";
}
std::cout.flush();
}
/* ************************************************************************ */
void HybridGaussianFactorGraph::printErrors(
const HybridValues &values, const std::string &str,
@ -128,42 +164,27 @@ void HybridGaussianFactorGraph::printErrors(
}
/* ************************************************************************ */
static GaussianFactorGraphTree addGaussian(
const GaussianFactorGraphTree &gfgTree,
const GaussianFactor::shared_ptr &factor) {
// If the decision tree is not initialized, then initialize it.
if (gfgTree.empty()) {
GaussianFactorGraph result{factor};
return GaussianFactorGraphTree(result);
} else {
auto add = [&factor](const GaussianFactorGraph &graph) {
auto result = graph;
result.push_back(factor);
return result;
};
return gfgTree.apply(add);
}
}
/* ************************************************************************ */
// TODO(dellaert): it's probably more efficient to first collect the discrete
// keys, and then loop over all assignments to populate a vector.
GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const {
GaussianFactorGraphTree result;
HybridGaussianProductFactor HybridGaussianFactorGraph::collectProductFactor()
const {
HybridGaussianProductFactor result;
for (auto &f : factors_) {
// TODO(dellaert): just use a virtual method defined in HybridFactor.
if (auto gf = dynamic_pointer_cast<GaussianFactor>(f)) {
result = addGaussian(result, gf);
// TODO(dellaert): can we make this cleaner and less error-prone?
if (auto orphan = dynamic_pointer_cast<OrphanWrapper>(f)) {
continue; // Ignore OrphanWrapper
} else if (auto gf = dynamic_pointer_cast<GaussianFactor>(f)) {
result += gf;
} else if (auto gc = dynamic_pointer_cast<GaussianConditional>(f)) {
result += gc;
} else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
result = gmf->add(result);
result += *gmf;
} else if (auto gm = dynamic_pointer_cast<HybridGaussianConditional>(f)) {
result = gm->add(result);
result += *gm; // handled above already?
} else if (auto hc = dynamic_pointer_cast<HybridConditional>(f)) {
if (auto gm = hc->asHybrid()) {
result = gm->add(result);
result += *gm;
} else if (auto g = hc->asGaussian()) {
result = addGaussian(result, g);
result += g;
} else {
// Has to be discrete.
// TODO(dellaert): in C++20, we can use std::visit.
@ -176,7 +197,7 @@ GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const {
} else {
// TODO(dellaert): there was an unattributed comment here: we need to
// handle the case where the object is actually a BayesTreeOrphanWrapper!
throwRuntimeError("gtsam::assembleGraphTree", f);
throwRuntimeError("gtsam::collectProductFactor", f);
}
}
@ -208,22 +229,15 @@ continuousElimination(const HybridGaussianFactorGraph &factors,
}
/* ************************************************************************ */
/**
* @brief Exponentiate (not necessarily normalized) negative log-values,
* normalize, and then return as AlgebraicDecisionTree<Key>.
*
* @param logValues DecisionTree of (unnormalized) log values.
* @return AlgebraicDecisionTree<Key>
*/
static AlgebraicDecisionTree<Key> probabilitiesFromNegativeLogValues(
const AlgebraicDecisionTree<Key> &logValues) {
// Perform normalization
double min_log = logValues.min();
AlgebraicDecisionTree<Key> probabilities = DecisionTree<Key, double>(
logValues, [&min_log](const double x) { return exp(-(x - min_log)); });
probabilities = probabilities.normalize(probabilities.sum());
return probabilities;
/// Take negative log-values, shift them so that the minimum value is 0, and
/// then exponentiate to create a DecisionTreeFactor (not normalized yet!).
static DecisionTreeFactor::shared_ptr DiscreteFactorFromErrors(
const DiscreteKeys &discreteKeys,
const AlgebraicDecisionTree<Key> &errors) {
double min_log = errors.min();
AlgebraicDecisionTree<Key> potentials = DecisionTree<Key, double>(
errors, [&min_log](const double x) { return exp(-(x - min_log)); });
return std::make_shared<DecisionTreeFactor>(discreteKeys, potentials);
}
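A worked sketch with hypothetical numbers: errors {2.0, 5.0} over one binary mode are shifted to {0.0, 3.0}, giving potentials {1.0, exp(-3)} ≈ {1.0, 0.0498}. The shift cancels once the factor is normalized, but keeps exp() away from underflow when errors are large.

const DiscreteKey mode(M(0), 2);  // M(0) from symbol_shorthand, assumed here
const AlgebraicDecisionTree<Key> errors(mode, 2.0, 5.0);
auto factor = DiscreteFactorFromErrors({mode}, errors);
// The factor's potentials are {exp(0), exp(-3)}, unnormalized as noted above.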
/* ************************************************************************ */
@ -237,21 +251,15 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
dfg.push_back(df);
} else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
// Case where we have a HybridGaussianFactor with no continuous keys.
// In this case, compute discrete probabilities.
auto logProbability =
[&](const GaussianFactor::shared_ptr &factor) -> double {
// If the factor is null, it has been pruned, hence return ∞
// so that exp(-∞) = 0.
// In this case, compute a discrete factor from the remaining error.
auto calculateError = [&](const auto &pair) -> double {
auto [factor, scalar] = pair;
// If factor is null, it has been pruned, hence return infinite error
if (!factor) return std::numeric_limits<double>::infinity();
return factor->error(VectorValues());
return scalar + factor->error(kEmpty);
};
AlgebraicDecisionTree<Key> logProbabilities =
DecisionTree<Key, double>(gmf->factors(), logProbability);
AlgebraicDecisionTree<Key> probabilities =
probabilitiesFromNegativeLogValues(logProbabilities);
dfg.emplace_shared<DecisionTreeFactor>(gmf->discreteKeys(),
probabilities);
DecisionTree<Key, double> errors(gmf->factors(), calculateError);
dfg.push_back(DiscreteFactorFromErrors(gmf->discreteKeys(), errors));
} else if (auto orphan = dynamic_pointer_cast<OrphanWrapper>(f)) {
// Ignore orphaned clique.
@ -272,24 +280,6 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
}
/* ************************************************************************ */
// If any GaussianFactorGraph in the decision tree contains a nullptr, convert
// that leaf to an empty GaussianFactorGraph. Needed since the DecisionTree will
// otherwise create a GFG with a single (null) factor,
// which doesn't register as null.
GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) {
auto emptyGaussian = [](const GaussianFactorGraph &graph) {
bool hasNull =
std::any_of(graph.begin(), graph.end(),
[](const GaussianFactor::shared_ptr &ptr) { return !ptr; });
return hasNull ? GaussianFactorGraph() : graph;
};
return GaussianFactorGraphTree(sum, emptyGaussian);
}
/* ************************************************************************ */
using Result = std::pair<std::shared_ptr<GaussianConditional>,
HybridGaussianFactor::sharedFactor>;
/**
* Compute the probability p(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m))
* from the residual error ||b||^2 at the mean μ.
@ -297,50 +287,48 @@ using Result = std::pair<std::shared_ptr<GaussianConditional>,
* The residual error contains no keys, and only depends on the discrete
* separator if present.
*/
static std::shared_ptr<Factor> createDiscreteFactor(
const DecisionTree<Key, Result> &eliminationResults,
const ResultTree &eliminationResults,
const DiscreteKeys &discreteSeparator) {
auto negLogProbability = [&](const Result &pair) -> double {
const auto &[conditional, factor] = pair;
static const VectorValues kEmpty;
// If the factor is null, it has been pruned, hence return ∞
// so that the exp(-∞)=0.
if (!factor) return std::numeric_limits<double>::infinity();
// Negative logspace version of:
// exp(-factor->error(kEmpty)) / conditional->normalizationConstant();
// = exp(-factor->error(kEmpty)) * \sqrt{|2πΣ|};
// log = -(-factor->error(kEmpty) + log(\sqrt{|2πΣ|}))
// = factor->error(kEmpty) - log(\sqrt{|2πΣ|})
// negLogConstant gives `-log(k)`
// which is `-log(k) = log(1/k) = log(\sqrt{|2πΣ|})`.
return factor->error(kEmpty) - conditional->negLogConstant();
auto calculateError = [&](const auto &pair) -> double {
const auto &[conditional, factor] = pair.first;
const double scalar = pair.second;
if (conditional && factor) {
// `error` has the following contributions:
// - the scalar is the sum of all mode-dependent constants
// - factor->error(kEmpty) is the error remaining after elimination
// - negLogK is what is given to the conditional to normalize
const double negLogK = conditional->negLogConstant();
return scalar + factor->error(kEmpty) - negLogK;
} else if (!conditional && !factor) {
// If the factor has been pruned, return infinite error
return std::numeric_limits<double>::infinity();
} else {
throw std::runtime_error("createDiscreteFactor has mixed NULLs");
}
};
AlgebraicDecisionTree<Key> negLogProbabilities(
DecisionTree<Key, double>(eliminationResults, negLogProbability));
AlgebraicDecisionTree<Key> probabilities =
probabilitiesFromNegativeLogValues(negLogProbabilities);
return std::make_shared<DecisionTreeFactor>(discreteSeparator, probabilities);
DecisionTree<Key, double> errors(eliminationResults, calculateError);
return DiscreteFactorFromErrors(discreteSeparator, errors);
}
/* *******************************************************************************/
// Create HybridGaussianFactor on the separator, taking care to correct
// for conditional constants.
static std::shared_ptr<Factor> createHybridGaussianFactor(
const DecisionTree<Key, Result> &eliminationResults,
const ResultTree &eliminationResults,
const DiscreteKeys &discreteSeparator) {
// Correct for the normalization constant used up by the conditional
auto correct = [&](const Result &pair) -> GaussianFactorValuePair {
const auto &[conditional, factor] = pair;
if (factor) {
auto hf = std::dynamic_pointer_cast<HessianFactor>(factor);
if (!hf) throw std::runtime_error("Expected HessianFactor!");
// Add 2.0 term since the constant term will be premultiplied by 0.5
// as per the Hessian definition,
// and negative since we want log(k)
hf->constantTerm() += -2.0 * conditional->negLogConstant();
auto correct = [&](const ResultValuePair &pair) -> GaussianFactorValuePair {
const auto &[conditional, factor] = pair.first;
const double scalar = pair.second;
if (conditional && factor) {
const double negLogK = conditional->negLogConstant();
return {factor, scalar - negLogK};
} else if (!conditional && !factor) {
return {nullptr, std::numeric_limits<double>::infinity()};
} else {
throw std::runtime_error("createHybridGaussianFactors has mixed NULLs");
}
return {factor, 0.0};
};
DecisionTree<Key, GaussianFactorValuePair> newFactors(eliminationResults,
correct);
@ -364,32 +352,40 @@ HybridGaussianFactorGraph::eliminate(const Ordering &keys) const {
DiscreteKeys discreteSeparator = GetDiscreteKeys(*this);
// Collect all the factors to create a set of Gaussian factor graphs in a
// decision tree indexed by all discrete keys involved.
GaussianFactorGraphTree factorGraphTree = assembleGraphTree();
// decision tree indexed by all discrete keys involved. Just like any hybrid
// factor, every assignment also has a scalar error, in this case the sum of
// all errors in the graph. This error is assignment-specific and accounts for
// any difference in noise models used.
HybridGaussianProductFactor productFactor = collectProductFactor();
// Convert factor graphs with a nullptr to an empty factor graph.
// This is done after assembly since it is non-trivial to keep track of which
// FG has a nullptr as we're looping over the factors.
factorGraphTree = removeEmpty(factorGraphTree);
auto prunedProductFactor = productFactor.removeEmpty();
// This is the elimination method on the leaf nodes
bool someContinuousLeft = false;
auto eliminate = [&](const GaussianFactorGraph &graph) -> Result {
auto eliminate = [&](const std::pair<GaussianFactorGraph, double> &pair)
-> std::pair<Result, double> {
const auto &[graph, scalar] = pair;
if (graph.empty()) {
return {nullptr, nullptr};
return {{nullptr, nullptr}, 0.0};
}
// Expensive elimination of product factor.
auto result = EliminatePreferCholesky(graph, keys);
auto result =
EliminatePreferCholesky(graph, keys); /// <<<<<< MOST COMPUTE IS HERE
// Record whether there are any continuous variables left
someContinuousLeft |= !result.second->empty();
return result;
// We pass on the scalar unmodified.
return {result, scalar};
};
// Perform elimination!
DecisionTree<Key, Result> eliminationResults(factorGraphTree, eliminate);
ResultTree eliminationResults(prunedProductFactor, eliminate);
// If there are no more continuous parents we create a DiscreteFactor with the
// error for each discrete choice. Otherwise, create a HybridGaussianFactor
@ -401,7 +397,8 @@ HybridGaussianFactorGraph::eliminate(const Ordering &keys) const {
// Create the HybridGaussianConditional from the conditionals
HybridGaussianConditional::Conditionals conditionals(
eliminationResults, [](const Result &pair) { return pair.first; });
eliminationResults,
[](const ResultValuePair &pair) { return pair.first.first; });
auto hybridGaussian = std::make_shared<HybridGaussianConditional>(
discreteSeparator, conditionals);
@ -484,6 +481,7 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors,
} else if (hybrid_factor->isHybrid()) {
only_continuous = false;
only_discrete = false;
break;
}
} else if (auto cont_factor =
std::dynamic_pointer_cast<GaussianFactor>(factor)) {
@ -520,10 +518,11 @@ AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::errorTree(
} else if (auto df = std::dynamic_pointer_cast<DiscreteFactor>(factor)) {
// If discrete, just add its errorTree as well
result = result + df->errorTree();
} else if (auto gf = dynamic_pointer_cast<GaussianFactor>(factor)) {
// For a continuous only factor, just add its error
result = result + gf->error(continuousValues);
} else {
// Everything else is a continuous only factor
HybridValues hv(continuousValues, DiscreteValues());
result = result + factor->error(hv); // NOTE: yes, you can add constants
throwRuntimeError("HybridGaussianFactorGraph::errorTree", factor);
}
}
return result;
@ -557,9 +556,14 @@ GaussianFactorGraph HybridGaussianFactorGraph::choose(
} else if (auto gc = std::dynamic_pointer_cast<GaussianConditional>(f)) {
gfg.push_back(gc);
} else if (auto hgf = std::dynamic_pointer_cast<HybridGaussianFactor>(f)) {
gfg.push_back((*hgf)(assignment));
gfg.push_back((*hgf)(assignment).first);
} else if (auto hgc = dynamic_pointer_cast<HybridGaussianConditional>(f)) {
gfg.push_back((*hgc)(assignment));
} else if (auto hc = dynamic_pointer_cast<HybridConditional>(f)) {
if (auto gc = hc->asGaussian())
gfg.push_back(gc);
else if (auto hgc = hc->asHybrid())
gfg.push_back((*hgc)(assignment));
} else {
continue;
}

View File

@ -145,10 +145,9 @@ class GTSAM_EXPORT HybridGaussianFactorGraph
/// @name Testable
/// @{
// TODO(dellaert): customize print and equals.
// void print(
// const std::string& s = "HybridGaussianFactorGraph",
// const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override;
void print(
const std::string& s = "HybridGaussianFactorGraph",
const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override;
/**
* @brief Print the errors of each factor in the hybrid factor graph.
@ -218,7 +217,7 @@ class GTSAM_EXPORT HybridGaussianFactorGraph
* one for A and one for B. The leaves of the tree will be the Gaussian
* factors that have only continuous keys.
*/
GaussianFactorGraphTree assembleGraphTree() const;
HybridGaussianProductFactor collectProductFactor() const;
/**
* @brief Eliminate the given continuous keys.

View File

@ -0,0 +1,112 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file HybridGaussianProductFactor.cpp
* @date Oct 2, 2024
* @author Frank Dellaert
* @author Varun Agrawal
*/
#include <gtsam/base/types.h>
#include <gtsam/discrete/DecisionTree.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianProductFactor.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <string>
namespace gtsam {
using Y = GaussianFactorGraphValuePair;
/* *******************************************************************************/
static Y add(const Y& y1, const Y& y2) {
GaussianFactorGraph result = y1.first;
result.push_back(y2.first);
return {result, y1.second + y2.second};
}
/* *******************************************************************************/
HybridGaussianProductFactor operator+(const HybridGaussianProductFactor& a,
const HybridGaussianProductFactor& b) {
return a.empty() ? b : HybridGaussianProductFactor(a.apply(b, add));
}
/* *******************************************************************************/
HybridGaussianProductFactor HybridGaussianProductFactor::operator+(
const HybridGaussianFactor& factor) const {
return *this + factor.asProductFactor();
}
/* *******************************************************************************/
HybridGaussianProductFactor HybridGaussianProductFactor::operator+(
const GaussianFactor::shared_ptr& factor) const {
return *this + HybridGaussianProductFactor(factor);
}
/* *******************************************************************************/
HybridGaussianProductFactor& HybridGaussianProductFactor::operator+=(
const GaussianFactor::shared_ptr& factor) {
*this = *this + factor;
return *this;
}
/* *******************************************************************************/
HybridGaussianProductFactor& HybridGaussianProductFactor::operator+=(
const HybridGaussianFactor& factor) {
*this = *this + factor;
return *this;
}
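A hedged usage sketch of the operators above; prior (a GaussianFactor::shared_ptr) and hybridFactor (a HybridGaussianFactor over mode M(0)) are hypothetical:

HybridGaussianProductFactor product;
product += prior;         // added to the graph at every leaf, scalar unchanged
product += hybridFactor;  // splits the tree on the factor's discrete mode(s)
// Each leaf now pairs the graph of active factors with the sum of scalars:
auto [graph, scalar] = product(DiscreteValues{{M(0), 1}});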
/* *******************************************************************************/
void HybridGaussianProductFactor::print(const std::string& s,
const KeyFormatter& formatter) const {
KeySet keys;
auto printer = [&](const Y& y) {
if (keys.empty()) keys = y.first.keys();
return "Graph of size " + std::to_string(y.first.size()) +
", scalar sum: " + std::to_string(y.second);
};
Base::print(s, formatter, printer);
if (!keys.empty()) {
std::cout << s << " Keys:";
for (auto&& key : keys) std::cout << " " << formatter(key);
std::cout << "." << std::endl;
}
}
/* *******************************************************************************/
bool HybridGaussianProductFactor::equals(
const HybridGaussianProductFactor& other, double tol) const {
return Base::equals(other, [tol](const Y& a, const Y& b) {
return a.first.equals(b.first, tol) && std::abs(a.second - b.second) < tol;
});
}
/* *******************************************************************************/
HybridGaussianProductFactor HybridGaussianProductFactor::removeEmpty() const {
auto emptyGaussian = [](const Y& y) {
bool hasNull =
std::any_of(y.first.begin(), y.first.end(),
[](const GaussianFactor::shared_ptr& ptr) { return !ptr; });
return hasNull ? Y{GaussianFactorGraph(), 0.0} : y;
};
return {Base(*this, emptyGaussian)};
}
/* *******************************************************************************/
std::istream& operator>>(std::istream& is, GaussianFactorGraphValuePair& pair) {
// Dummy, don't do anything
return is;
}
} // namespace gtsam

View File

@ -0,0 +1,147 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file HybridGaussianProductFactor.h
* @date Oct 2, 2024
* @author Frank Dellaert
* @author Varun Agrawal
*/
#pragma once
#include <gtsam/discrete/DecisionTree.h>
#include <gtsam/inference/Key.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <iostream>
namespace gtsam {
class HybridGaussianFactor;
using GaussianFactorGraphValuePair = std::pair<GaussianFactorGraph, double>;
/// DecisionTree of GaussianFactorGraphs and their scalar sums
class GTSAM_EXPORT HybridGaussianProductFactor
: public DecisionTree<Key, GaussianFactorGraphValuePair> {
public:
using Base = DecisionTree<Key, GaussianFactorGraphValuePair>;
/// @name Constructors
/// @{
/// Default constructor
HybridGaussianProductFactor() = default;
/**
* @brief Construct from a single factor
* @tparam FACTOR Factor type
* @param factor Shared pointer to the factor
*/
template <class FACTOR>
HybridGaussianProductFactor(const std::shared_ptr<FACTOR>& factor)
: Base(GaussianFactorGraphValuePair{GaussianFactorGraph{factor}, 0.0}) {}
/**
* @brief Construct from DecisionTree
* @param tree Decision tree to construct from
*/
HybridGaussianProductFactor(Base&& tree) : Base(std::move(tree)) {}
///@}
/// @name Operators
///@{
/// Add GaussianFactor into HybridGaussianProductFactor
HybridGaussianProductFactor operator+(
const GaussianFactor::shared_ptr& factor) const;
/// Add HybridGaussianFactor into HybridGaussianProductFactor
HybridGaussianProductFactor operator+(
const HybridGaussianFactor& factor) const;
/// Add-assign operator for GaussianFactor
HybridGaussianProductFactor& operator+=(
const GaussianFactor::shared_ptr& factor);
/// Add-assign operator for HybridGaussianFactor
HybridGaussianProductFactor& operator+=(const HybridGaussianFactor& factor);
///@}
/// @name Testable
/// @{
/**
* @brief Print the HybridGaussianProductFactor
* @param s Optional string to prepend
* @param formatter Optional key formatter
*/
void print(const std::string& s = "",
const KeyFormatter& formatter = DefaultKeyFormatter) const;
/**
* @brief Check if this HybridGaussianProductFactor is equal to another
* @param other The other HybridGaussianProductFactor to compare with
* @param tol Tolerance for floating point comparisons
* @return true if equal, false otherwise
*/
bool equals(const HybridGaussianProductFactor& other,
double tol = 1e-9) const;
/// @}
/// @name Other methods
///@{
/**
* @brief Convert leaves containing a nullptr factor into empty graphs
* @return A new HybridGaussianProductFactor in which each such leaf is an
* empty GaussianFactorGraph with zero scalar
*
* If any GaussianFactorGraph in the decision tree contains a nullptr, convert
* that leaf to an empty GaussianFactorGraph with zero scalar sum. This is
* needed because the DecisionTree will otherwise create a GaussianFactorGraph
* with a single (null) factor, which doesn't register as null.
*/
HybridGaussianProductFactor removeEmpty() const;
///@}
private:
#ifdef GTSAM_ENABLE_BOOST_SERIALIZATION
/** Serialization function */
friend class boost::serialization::access;
template <class Archive>
void serialize(Archive& ar, const unsigned int /*version*/) {
ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base);
}
#endif
};
// Testable traits
template <>
struct traits<HybridGaussianProductFactor>
: public Testable<HybridGaussianProductFactor> {};
/**
* Create a dummy overload of >> for GaussianFactorGraphValuePair
* so that HybridGaussianProductFactor compiles
* with the constructor
* `DecisionTree(const std::vector<LabelC>& labelCs, const std::string& table)`.
*
* Needed to compile on Windows.
*/
std::istream& operator>>(std::istream& is, GaussianFactorGraphValuePair& pair);
} // namespace gtsam

View File

@ -210,4 +210,16 @@ AlgebraicDecisionTree<Key> HybridNonlinearFactorGraph::errorTree(
return result;
}
/* ************************************************************************ */
AlgebraicDecisionTree<Key> HybridNonlinearFactorGraph::discretePosterior(
const Values& continuousValues) const {
AlgebraicDecisionTree<Key> errors = this->errorTree(continuousValues);
AlgebraicDecisionTree<Key> p = errors.apply([](double error) {
// NOTE: The 0.5 term is handled by each factor
return exp(-error);
});
return p / p.sum();
}
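A worked sketch with hypothetical errors {0.5, 1.5} over one binary mode: the unnormalized weights are {exp(-0.5), exp(-1.5)}, and normalizing yields P(M|X=x) ≈ {0.731, 0.269}. Any constant shared by all modes cancels in the division.

const DiscreteKey mode(M(0), 2);  // assumed symbol_shorthand key
AlgebraicDecisionTree<Key> p(mode, std::exp(-0.5), std::exp(-1.5));
p = p / p.sum();  // ≈ {0.731, 0.269}, mirroring the normalization above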
/* ************************************************************************ */
} // namespace gtsam

View File

@ -103,6 +103,19 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph {
*/
AlgebraicDecisionTree<Key> errorTree(const Values& values) const;
/**
* @brief Compute the posterior P(M|X=x) when all continuous values X are given.
* This is efficient as it simply takes exp(-.) of the errorTree and normalizes.
*
* @note Not a DiscreteConditional as the cardinalities of the DiscreteKeys,
* which we would need, are hard to recover.
*
* @param continuousValues Continuous values x to condition on.
* @return AlgebraicDecisionTree<Key>
*/
AlgebraicDecisionTree<Key> discretePosterior(
const Values& continuousValues) const;
/// @}
};

View File

@ -60,17 +60,6 @@ double prob_m_z(double mu0, double mu1, double sigma0, double sigma1,
return p1 / (p0 + p1);
};
/// Given \phi(m;z)\phi(m) use eliminate to obtain P(m|z).
DiscreteConditional SolveHFG(const HybridGaussianFactorGraph &hfg) {
return *hfg.eliminateSequential()->at(0)->asDiscrete();
}
/// Given p(z,m) and z, convert to HFG and solve.
DiscreteConditional SolveHBN(const HybridBayesNet &hbn, double z) {
VectorValues given{{Z(0), Vector1(z)}};
return SolveHFG(hbn.toFactorGraph(given));
}
/*
* Test a Gaussian Mixture Model P(m)p(z|m) with same sigma.
* The posterior, as a function of z, should be a sigmoid function.
@ -88,7 +77,9 @@ TEST(GaussianMixture, GaussianMixtureModel) {
// At the halfway point between the means, we should get P(m|z)=0.5
double midway = mu1 - mu0;
auto pMid = SolveHBN(gmm, midway);
auto eliminationResult =
gmm.toFactorGraph({{Z(0), Vector1(midway)}}).eliminateSequential();
auto pMid = *eliminationResult->at(0)->asDiscrete();
EXPECT(assert_equal(DiscreteConditional(m, "60/40"), pMid));
// Everywhere else, the result should be a sigmoid.
@ -97,7 +88,9 @@ TEST(GaussianMixture, GaussianMixtureModel) {
const double expected = prob_m_z(mu0, mu1, sigma, sigma, z);
// Workflow 1: convert HBN to HFG and solve
auto posterior1 = SolveHBN(gmm, z);
auto eliminationResult1 =
gmm.toFactorGraph({{Z(0), Vector1(z)}}).eliminateSequential();
auto posterior1 = *eliminationResult1->at(0)->asDiscrete();
EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8);
// Workflow 2: directly specify HFG and solve
@ -105,7 +98,8 @@ TEST(GaussianMixture, GaussianMixtureModel) {
hfg1.emplace_shared<DecisionTreeFactor>(
m, std::vector{Gaussian(mu0, sigma, z), Gaussian(mu1, sigma, z)});
hfg1.push_back(mixing);
auto posterior2 = SolveHFG(hfg1);
auto eliminationResult2 = hfg1.eliminateSequential();
auto posterior2 = *eliminationResult2->at(0)->asDiscrete();
EXPECT_DOUBLES_EQUAL(expected, posterior2(m1Assignment), 1e-8);
}
}
@ -128,7 +122,23 @@ TEST(GaussianMixture, GaussianMixtureModel2) {
// We get zMax=3.1333 by finding the maximum value of the function, at which
// point the mode m==1 is about twice as probable as m==0.
double zMax = 3.133;
auto pMax = SolveHBN(gmm, zMax);
const VectorValues vv{{Z(0), Vector1(zMax)}};
auto gfg = gmm.toFactorGraph(vv);
// Equality of posteriors asserts that the elimination is correct (same ratios
// for all modes)
const auto& expectedDiscretePosterior = gmm.discretePosterior(vv);
EXPECT(assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv)));
// Eliminate the graph!
auto eliminationResultMax = gfg.eliminateSequential();
// Equality of posteriors asserts that the elimination is correct (same ratios
// for all modes)
EXPECT(assert_equal(expectedDiscretePosterior,
eliminationResultMax->discretePosterior(vv)));
auto pMax = *eliminationResultMax->at(0)->asDiscrete();
EXPECT(assert_equal(DiscreteConditional(m, "42/58"), pMax, 1e-4));
// Everywhere else, the result should be a bell curve like function.
@ -137,7 +147,9 @@ TEST(GaussianMixture, GaussianMixtureModel2) {
const double expected = prob_m_z(mu0, mu1, sigma0, sigma1, z);
// Workflow 1: convert HBN to HFG and solve
auto posterior1 = SolveHBN(gmm, z);
auto eliminationResult1 =
gmm.toFactorGraph({{Z(0), Vector1(z)}}).eliminateSequential();
auto posterior1 = *eliminationResult1->at(0)->asDiscrete();
EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8);
// Workflow 2: directly specify HFG and solve
@ -145,11 +157,11 @@ TEST(GaussianMixture, GaussianMixtureModel2) {
hfg.emplace_shared<DecisionTreeFactor>(
m, std::vector{Gaussian(mu0, sigma0, z), Gaussian(mu1, sigma1, z)});
hfg.push_back(mixing);
auto posterior2 = SolveHFG(hfg);
auto eliminationResult2 = hfg.eliminateSequential();
auto posterior2 = *eliminationResult2->at(0)->asDiscrete();
EXPECT_DOUBLES_EQUAL(expected, posterior2(m1Assignment), 1e-8);
}
}
/* ************************************************************************* */
int main() {
TestResult tr;

View File

@ -18,9 +18,12 @@
* @date December 2021
*/
#include <gtsam/base/Testable.h>
#include <gtsam/discrete/DiscreteFactor.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridConditional.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include "Switching.h"
@ -28,6 +31,7 @@
// Include for test suite
#include <CppUnitLite/TestHarness.h>
#include <memory>
using namespace std;
using namespace gtsam;
@ -113,7 +117,7 @@ TEST(HybridBayesNet, EvaluatePureDiscrete) {
}
/* ****************************************************************************/
// Test creation of a tiny hybrid Bayes net.
// Test API for a tiny hybrid Bayes net.
TEST(HybridBayesNet, Tiny) {
auto bayesNet = tiny::createHybridBayesNet(); // P(z|x,mode)P(x)P(mode)
EXPECT_LONGS_EQUAL(3, bayesNet.size());
@ -189,6 +193,20 @@ TEST(HybridBayesNet, Tiny) {
auto fg = bayesNet.toFactorGraph({{Z(0), Vector1(5.0)}});
EXPECT_LONGS_EQUAL(3, fg.size());
// Create the product factor for eliminating x0:
HybridGaussianFactorGraph factors_x0;
factors_x0.push_back(fg.at(0));
factors_x0.push_back(fg.at(1));
auto productFactor = factors_x0.collectProductFactor();
// Check that scalars are 0 and 1.79 (regression)
EXPECT_DOUBLES_EQUAL(0.0, productFactor({{M(0), 0}}).second, 1e-9);
EXPECT_DOUBLES_EQUAL(1.791759, productFactor({{M(0), 1}}).second, 1e-5);
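// Hedged aside: 1.791759 ≈ log(6), consistent with the two measurement
// models of the tiny example differing in sigma by a factor of 6 (assumed),
// since the scalar is the difference of their negLogConstant values.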
// Call eliminate and check scalar:
auto result = factors_x0.eliminate({X(0)});
auto df = std::dynamic_pointer_cast<DecisionTreeFactor>(result.second);
// Check that the ratio of probPrime to evaluate is the same for all modes.
std::vector<double> ratio(2);
ratio[0] = std::exp(-fg.error(zero)) / bayesNet.evaluate(zero);

View File

@ -20,6 +20,9 @@
#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridGaussianISAM.h>
#include <gtsam/inference/DotWriter.h>
#include <numeric>
#include "Switching.h"
@ -28,9 +31,320 @@
using namespace std;
using namespace gtsam;
using noiseModel::Isotropic;
using symbol_shorthand::D;
using symbol_shorthand::M;
using symbol_shorthand::X;
using symbol_shorthand::Y;
static const DiscreteKey m0(M(0), 2), m1(M(1), 2), m2(M(2), 2), m3(M(3), 2);
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, EliminateMultifrontal) {
// Test multifrontal elimination
HybridGaussianFactorGraph hfg;
// Add priors on x0 and c1
hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
hfg.add(DecisionTreeFactor(m1, {2, 8}));
Ordering ordering;
ordering.push_back(X(0));
auto result = hfg.eliminatePartialMultifrontal(ordering);
EXPECT_LONGS_EQUAL(result.first->size(), 1);
EXPECT_LONGS_EQUAL(result.second->size(), 1);
}
/* ************************************************************************* */
namespace two {
std::vector<GaussianFactor::shared_ptr> components(Key key) {
return {std::make_shared<JacobianFactor>(key, I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(key, I_3x3, Vector3::Ones())};
}
} // namespace two
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph,
HybridGaussianFactorGraphEliminateFullMultifrontalSimple) {
HybridGaussianFactorGraph hfg;
hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
hfg.add(HybridGaussianFactor(m1, two::components(X(1))));
hfg.add(DecisionTreeFactor(m1, {2, 8}));
// TODO(Varun) Adding an extra discrete variable not connected to a
// continuous variable throws a segfault:
// hfg.add(DecisionTreeFactor({m1, m2}, "1 2 3 4"));
HybridBayesTree::shared_ptr result = hfg.eliminateMultifrontal();
// The Bayes tree should have 3 cliques
EXPECT_LONGS_EQUAL(3, result->size());
}
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) {
HybridGaussianFactorGraph hfg;
// Prior on x0
hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
// Factor between x0-x1
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
// Hybrid factor P(x1|c1)
hfg.add(HybridGaussianFactor(m1, two::components(X(1))));
// Prior factor on c1
hfg.add(DecisionTreeFactor(m1, {2, 8}));
// Get a constrained ordering keeping c1 last
auto ordering_full = HybridOrdering(hfg);
// Returns a Hybrid Bayes Tree with distribution P(x0|x1)P(x1|c1)P(c1)
HybridBayesTree::shared_ptr hbt = hfg.eliminateMultifrontal(ordering_full);
EXPECT_LONGS_EQUAL(3, hbt->size());
}
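For reference, a minimal sketch (an assumption, not part of this commit) of the constrained ordering HybridOrdering computes here: COLAMD on the graph with the discrete key forced to come last, the explicit form the two-clique test below spells out:

// Hypothetical equivalent of HybridOrdering(hfg) for this graph:
// continuous keys first (COLAMD), discrete key M(1) forced last.
Ordering manual = Ordering::ColamdConstrainedLast(hfg, {M(1)});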
/* ************************************************************************* */
// Check assembling the Bayes Tree roots after we do partial elimination
TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
HybridGaussianFactorGraph hfg;
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1));
hfg.add(HybridGaussianFactor(m0, two::components(X(0))));
hfg.add(HybridGaussianFactor(m1, two::components(X(2))));
hfg.add(DecisionTreeFactor({m1, m2}, "1 2 3 4"));
hfg.add(JacobianFactor(X(3), I_3x3, X(4), -I_3x3, Z_3x1));
hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1));
hfg.add(HybridGaussianFactor(m3, two::components(X(3))));
hfg.add(HybridGaussianFactor(m2, two::components(X(5))));
auto ordering_full =
Ordering::ColamdConstrainedLast(hfg, {M(0), M(1), M(2), M(3)});
const auto [hbt, remaining] = hfg.eliminatePartialMultifrontal(ordering_full);
// 9 cliques in the bayes tree and 0 remaining variables to eliminate.
EXPECT_LONGS_EQUAL(9, hbt->size());
EXPECT_LONGS_EQUAL(0, remaining->size());
/*
(Fan) Explanation: the Junction tree will need to re-eliminate to get to the
marginal on X(1), which is not possible because it involves eliminating
discrete before continuous. The solution to this, however, is in Murphy02.
TL;DR: this is 1. expensive and 2. inexact. Nevertheless, it is doable.
And I believe that we should do this.
*/
}
/* ************************************************************************* */
void dotPrint(const HybridGaussianFactorGraph::shared_ptr& hfg,
const HybridBayesTree::shared_ptr& hbt,
const Ordering& ordering) {
DotWriter dw;
dw.positionHints['c'] = 2;
dw.positionHints['x'] = 1;
std::cout << hfg->dot(DefaultKeyFormatter, dw);
std::cout << "\n";
hbt->dot(std::cout);
std::cout << "\n";
std::cout << hfg->eliminateSequential(ordering)->dot(DefaultKeyFormatter, dw);
}
/* ************************************************************************* */
// TODO(fan): make a graph like Varun's paper one
TEST(HybridGaussianFactorGraph, Switching) {
auto N = 12;
auto hfg = makeSwitchingChain(N);
// X(5) will be the center, X(1-4), X(6-9)
// X(3), X(7)
// X(2), X(8)
// X(1), X(4), X(6), X(9)
// M(5) will be the center, M(1-4), M(6-8)
// M(3), M(7)
// M(1), M(4), M(2), M(6), M(8)
// auto ordering_full =
// Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7),
// X(5),
// M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)});
KeyVector ordering;
{
std::vector<int> naturalX(N);
std::iota(naturalX.begin(), naturalX.end(), 1);
std::vector<Key> ordX;
std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX),
[](int x) { return X(x); });
auto [ndX, lvls] = makeBinaryOrdering(ordX);
std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering));
// TODO(dellaert): this has no effect!
for (auto& l : lvls) {
l = -l;
}
}
{
std::vector<int> naturalC(N - 1);
std::iota(naturalC.begin(), naturalC.end(), 1);
std::vector<Key> ordC;
std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC),
[](int x) { return M(x); });
// std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering));
const auto [ndC, lvls] = makeBinaryOrdering(ordC);
std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering));
}
auto ordering_full = Ordering(ordering);
const auto [hbt, remaining] =
hfg->eliminatePartialMultifrontal(ordering_full);
// 12 cliques in the bayes tree and 0 remaining variables to eliminate.
EXPECT_LONGS_EQUAL(12, hbt->size());
EXPECT_LONGS_EQUAL(0, remaining->size());
}
/* ************************************************************************* */
// TODO(fan): make a graph like Varun's paper one
TEST(HybridGaussianFactorGraph, SwitchingISAM) {
auto N = 11;
auto hfg = makeSwitchingChain(N);
// X(5) will be the center, X(1-4), X(6-9)
// X(3), X(7)
// X(2), X(8)
// X(1), X(4), X(6), X(9)
// M(5) will be the center, M(1-4), M(6-8)
// M(3), M(7)
// M(1), M(4), M(2), M(6), M(8)
// auto ordering_full =
// Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7),
// X(5),
// M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)});
KeyVector ordering;
{
std::vector<int> naturalX(N);
std::iota(naturalX.begin(), naturalX.end(), 1);
std::vector<Key> ordX;
std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX),
[](int x) { return X(x); });
auto [ndX, lvls] = makeBinaryOrdering(ordX);
std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering));
// TODO(dellaert): this has no effect!
for (auto& l : lvls) {
l = -l;
}
}
{
std::vector<int> naturalC(N - 1);
std::iota(naturalC.begin(), naturalC.end(), 1);
std::vector<Key> ordC;
std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC),
[](int x) { return M(x); });
// std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering));
const auto [ndC, lvls] = makeBinaryOrdering(ordC);
std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering));
}
auto ordering_full = Ordering(ordering);
const auto [hbt, remaining] =
hfg->eliminatePartialMultifrontal(ordering_full);
auto new_fg = makeSwitchingChain(12);
auto isam = HybridGaussianISAM(*hbt);
// Run an ISAM update.
HybridGaussianFactorGraph factorGraph;
factorGraph.push_back(new_fg->at(new_fg->size() - 2));
factorGraph.push_back(new_fg->at(new_fg->size() - 1));
isam.update(factorGraph);
// ISAM should have 12 factors after the last update
EXPECT_LONGS_EQUAL(12, isam.size());
}
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, SwitchingTwoVar) {
const int N = 7;
auto hfg = makeSwitchingChain(N, X);
hfg->push_back(*makeSwitchingChain(N, Y, D));
for (int t = 1; t <= N; t++) {
hfg->add(JacobianFactor(X(t), I_3x3, Y(t), -I_3x3, Vector3(1.0, 0.0, 0.0)));
}
KeyVector ordering;
KeyVector naturalX(N);
std::iota(naturalX.begin(), naturalX.end(), 1);
KeyVector ordX;
for (size_t i = 1; i <= N; i++) {
ordX.emplace_back(X(i));
ordX.emplace_back(Y(i));
}
for (size_t i = 1; i <= N - 1; i++) {
ordX.emplace_back(M(i));
}
for (size_t i = 1; i <= N - 1; i++) {
ordX.emplace_back(D(i));
}
{
DotWriter dw;
dw.positionHints['x'] = 1;
dw.positionHints['c'] = 0;
dw.positionHints['d'] = 3;
dw.positionHints['y'] = 2;
// std::cout << hfg->dot(DefaultKeyFormatter, dw);
// std::cout << "\n";
}
{
DotWriter dw;
dw.positionHints['y'] = 9;
// dw.positionHints['c'] = 0;
// dw.positionHints['d'] = 3;
dw.positionHints['x'] = 1;
// std::cout << "\n";
// std::cout << hfg->eliminateSequential(Ordering(ordX))
// ->dot(DefaultKeyFormatter, dw);
// hfg->eliminateMultifrontal(Ordering(ordX))->dot(std::cout);
}
Ordering ordering_partial;
for (size_t i = 1; i <= N; i++) {
ordering_partial.emplace_back(X(i));
ordering_partial.emplace_back(Y(i));
}
const auto [hbn, remaining] =
hfg->eliminatePartialSequential(ordering_partial);
EXPECT_LONGS_EQUAL(14, hbn->size());
EXPECT_LONGS_EQUAL(11, remaining->size());
{
DotWriter dw;
dw.positionHints['x'] = 1;
dw.positionHints['c'] = 0;
dw.positionHints['d'] = 3;
dw.positionHints['y'] = 2;
// std::cout << remaining->dot(DefaultKeyFormatter, dw);
// std::cout << "\n";
}
}
/* ****************************************************************************/
// Test multifrontal optimize

View File

@@ -37,8 +37,6 @@
// Include for test suite
#include <CppUnitLite/TestHarness.h>
#include <bitset>
#include "Switching.h"
using namespace std;
@@ -517,8 +515,6 @@ TEST(HybridEstimation, CorrectnessViaSampling) {
// the normalizing term computed via the Bayes net determinant.
const HybridValues sample = bn->sample(&rng);
double expected_ratio = compute_ratio(sample);
// regression
EXPECT_DOUBLES_EQUAL(0.728588, expected_ratio, 1e-6);
// 3. Do sampling
constexpr int num_samples = 10;

View File

@@ -18,6 +18,8 @@
* @date December 2021
*/
#include <gtsam/discrete/DecisionTree.h>
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
@@ -28,9 +30,6 @@
#include <memory>
#include <vector>
#include "gtsam/discrete/DecisionTree.h"
#include "gtsam/discrete/DiscreteKey.h"
// Include for test suite
#include <CppUnitLite/TestHarness.h>
@@ -217,30 +216,16 @@ TEST(HybridGaussianConditional, Likelihood2) {
// Check the detailed JacobianFactor calculation for mode==1.
{
// We have a JacobianFactor
const auto gf1 = (*likelihood)(assignment1);
const auto [gf1, _] = (*likelihood)(assignment1);
const auto jf1 = std::dynamic_pointer_cast<JacobianFactor>(gf1);
CHECK(jf1);
// It has 2 rows, not 1!
CHECK(jf1->rows() == 2);
// Check that the constant C1 is properly encoded in the JacobianFactor.
const double C1 =
conditionals[1]->negLogConstant() - hybrid_conditional.negLogConstant();
const double c1 = std::sqrt(2.0 * C1);
Vector expected_unwhitened(2);
expected_unwhitened << 4.9 - 5.0, -c1;
Vector actual_unwhitened = jf1->unweighted_error(vv);
EXPECT(assert_equal(expected_unwhitened, actual_unwhitened));
// Make sure the noise model does not touch it.
Vector expected_whitened(2);
expected_whitened << (4.9 - 5.0) / 3.0, -c1;
Vector actual_whitened = jf1->error_vector(vv);
EXPECT(assert_equal(expected_whitened, actual_whitened));
// Check that the error is equal to the conditional error:
EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), jf1->error(hv1), 1e-8);
// Check that the JacobianFactor error with constants is equal to the
// conditional error:
EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1),
jf1->error(hv1) + conditionals[1]->negLogConstant() -
hybrid_conditional.negLogConstant(),
1e-8);
}
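To spell out the constant encoding the removed lines checked: with C1 = conditionals[1]->negLogConstant() − hybrid_conditional.negLogConstant(), the likelihood's JacobianFactor carries an extra constant row with entry c1 = √(2 C1). Since a JacobianFactor's error is ½‖Ax − b‖², that row contributes exactly

    ½ c1² = ½ (√(2 C1))² = C1,

which is why jf1->error(hv1) plus the negLogConstant difference reproduces the conditional error in the final EXPECT.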
// Check that the ratio of probPrime to evaluate is the same for all modes.

View File

@@ -82,40 +82,25 @@ TEST(HybridGaussianFactor, ConstructorVariants) {
}
/* ************************************************************************* */
// "Add" two hybrid factors together.
TEST(HybridGaussianFactor, Sum) {
TEST(HybridGaussianFactor, Keys) {
using namespace test_constructor;
DiscreteKey m2(2, 3);
auto A3 = Matrix::Zero(2, 3);
auto f20 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
auto f21 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
auto f22 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
// TODO(Frank): why specify keys at all? And: keys in factor should be *all*
// keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey?
// Design review!
HybridGaussianFactor hybridFactorA(m1, {f10, f11});
HybridGaussianFactor hybridFactorB(m2, {f20, f21, f22});
// Check the number of keys matches what we expect
EXPECT_LONGS_EQUAL(3, hybridFactorA.keys().size());
EXPECT_LONGS_EQUAL(2, hybridFactorA.continuousKeys().size());
EXPECT_LONGS_EQUAL(1, hybridFactorA.discreteKeys().size());
// Create sum of two hybrid factors: it will be a decision tree now on both
// discrete variables m1 and m2:
GaussianFactorGraphTree sum;
sum += hybridFactorA;
sum += hybridFactorB;
DiscreteKey m2(2, 3);
auto A3 = Matrix::Zero(2, 3);
auto f20 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
auto f21 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
auto f22 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
HybridGaussianFactor hybridFactorB(m2, {f20, f21, f22});
// Let's check that this worked:
Assignment<Key> mode;
mode[m1.first] = 1;
mode[m2.first] = 2;
auto actual = sum(mode);
EXPECT(actual.at(0) == f11);
EXPECT(actual.at(1) == f22);
// Check the number of keys matches what we expect
EXPECT_LONGS_EQUAL(3, hybridFactorB.keys().size());
EXPECT_LONGS_EQUAL(2, hybridFactorB.continuousKeys().size());
EXPECT_LONGS_EQUAL(1, hybridFactorB.discreteKeys().size());
}
/* ************************************************************************* */
@@ -124,8 +109,7 @@ TEST(HybridGaussianFactor, Printing) {
HybridGaussianFactor hybridFactor(m1, {f10, f11});
std::string expected =
R"(HybridGaussianFactor
Hybrid [x1 x2; 1]{
R"(Hybrid [x1 x2; 1]{
Choice(1)
0 Leaf :
A[x1] = [
@@ -138,6 +122,7 @@ Hybrid [x1 x2; 1]{
]
b = [ 0 0 ]
No noise model
scalar: 0
1 Leaf :
A[x1] = [
@@ -150,6 +135,7 @@ Hybrid [x1 x2; 1]{
]
b = [ 0 0 ]
No noise model
scalar: 0
}
)";

View File

@@ -13,38 +13,34 @@
* @file testHybridGaussianFactorGraph.cpp
* @date Mar 11, 2022
* @author Fan Jiang
* @author Varun Agrawal
* @author Frank Dellaert
*/
#include <CppUnitLite/Test.h>
#include <CppUnitLite/TestHarness.h>
#include <gtsam/base/Testable.h>
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/base/Vector.h>
#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridConditional.h>
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridGaussianISAM.h>
#include <gtsam/hybrid/HybridGaussianProductFactor.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/BayesNet.h>
#include <gtsam/inference/DotWriter.h>
#include <gtsam/inference/Key.h>
#include <gtsam/inference/Ordering.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>
#include <algorithm>
#include <cstddef>
#include <functional>
#include <iostream>
#include <iterator>
#include <memory>
#include <numeric>
#include <vector>
#include "Switching.h"
@@ -53,17 +49,15 @@
using namespace std;
using namespace gtsam;
using gtsam::symbol_shorthand::D;
using gtsam::symbol_shorthand::M;
using gtsam::symbol_shorthand::N;
using gtsam::symbol_shorthand::X;
using gtsam::symbol_shorthand::Y;
using gtsam::symbol_shorthand::Z;
// Set up sampling
std::mt19937_64 kRng(42);
static const DiscreteKey m1(M(1), 2);
static const DiscreteKey m0(M(0), 2), m1(M(1), 2), m2(M(2), 2);
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, Creation) {
@@ -76,7 +70,7 @@ TEST(HybridGaussianFactorGraph, Creation) {
// Define a hybrid gaussian conditional P(x0|x1, c0)
// and add it to the factor graph.
HybridGaussianConditional gm(
{M(0), 2},
m0,
{std::make_shared<GaussianConditional>(X(0), Z_3x1, I_3x3, X(1), I_3x3),
std::make_shared<GaussianConditional>(X(0), Vector3::Ones(), I_3x3, X(1),
I_3x3)});
@@ -97,22 +91,6 @@ TEST(HybridGaussianFactorGraph, EliminateSequential) {
EXPECT_LONGS_EQUAL(result.first->size(), 1);
}
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, EliminateMultifrontal) {
// Test multifrontal elimination
HybridGaussianFactorGraph hfg;
// Add priors on x0 and c1
hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
hfg.add(DecisionTreeFactor(m1, {2, 8}));
Ordering ordering;
ordering.push_back(X(0));
auto result = hfg.eliminatePartialMultifrontal(ordering);
EXPECT_LONGS_EQUAL(result.first->size(), 1);
EXPECT_LONGS_EQUAL(result.second->size(), 1);
}
/* ************************************************************************* */
namespace two {
@@ -138,7 +116,8 @@ TEST(HybridGaussianFactorGraph, hybridEliminationOneFactor) {
// Check that factor is discrete and correct
auto factor = std::dynamic_pointer_cast<DecisionTreeFactor>(result.second);
CHECK(factor);
EXPECT(assert_equal(DecisionTreeFactor{m1, "1 1"}, *factor));
// regression test
EXPECT(assert_equal(DecisionTreeFactor{m1, "1 1"}, *factor, 1e-5));
}
/* ************************************************************************* */
@@ -178,7 +157,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
// Discrete probability table for c1
hfg.add(DecisionTreeFactor(m1, {2, 8}));
// Joint discrete probability table for c1, c2
hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
hfg.add(DecisionTreeFactor({m1, m2}, "1 2 3 4"));
HybridBayesNet::shared_ptr result = hfg.eliminateSequential();
@@ -187,295 +166,219 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
}
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
HybridGaussianFactorGraph hfg;
// Test API for the smallest switching network.
// None of these are regression tests.
TEST(HybridBayesNet, Switching) {
// Create switching network with two continuous variables and one discrete:
// ϕ(x0) ϕ(x0,x1,m0) ϕ(x1;z1) ϕ(m0)
const double betweenSigma = 0.3, priorSigma = 0.1;
Switching s(2, betweenSigma, priorSigma);
hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
// Check size of linearized factor graph
const HybridGaussianFactorGraph &graph = s.linearizedFactorGraph;
EXPECT_LONGS_EQUAL(4, graph.size());
hfg.add(HybridGaussianFactor({M(1), 2}, two::components(X(1))));
// Create some continuous and discrete values
const VectorValues continuousValues{{X(0), Vector1(0.1)},
{X(1), Vector1(1.2)}};
const DiscreteValues modeZero{{M(0), 0}}, modeOne{{M(0), 1}};
hfg.add(DecisionTreeFactor(m1, {2, 8}));
// TODO(Varun) Adding extra discrete variable not connected to continuous
// variable throws segfault
// hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
// Get the hybrid gaussian factor and check it is as expected
auto hgf = std::dynamic_pointer_cast<HybridGaussianFactor>(graph.at(1));
CHECK(hgf);
HybridBayesTree::shared_ptr result = hfg.eliminateMultifrontal();
// Get factors and scalars for both modes
auto [factor0, scalar0] = (*hgf)(modeZero);
auto [factor1, scalar1] = (*hgf)(modeOne);
CHECK(factor0);
CHECK(factor1);
// The bayes tree should have 3 cliques
EXPECT_LONGS_EQUAL(3, result->size());
// GTSAM_PRINT(*result);
// GTSAM_PRINT(*result->marginalFactor(M(2)));
}
// Check scalars against negLogConstant of noise model
auto betweenModel = noiseModel::Isotropic::Sigma(1, betweenSigma);
EXPECT_DOUBLES_EQUAL(betweenModel->negLogConstant(), scalar0, 1e-9);
EXPECT_DOUBLES_EQUAL(betweenModel->negLogConstant(), scalar1, 1e-9);
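A sketch of the constant being compared, assuming GTSAM's usual Gaussian normalization: for a d-dimensional noise model with covariance Σ,

    negLogConstant = (d/2) ln(2π) + (1/2) ln|Σ|,

so the isotropic 1-D between model with σ = 0.3 gives ½ ln(2π) + ln 0.3 ≈ −0.285, the value both scalars must match.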
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) {
HybridGaussianFactorGraph hfg;
// Check error for M(0) = 0
const HybridValues values0{continuousValues, modeZero};
double expectedError0 = 0;
for (const auto &factor : graph) expectedError0 += factor->error(values0);
EXPECT_DOUBLES_EQUAL(expectedError0, graph.error(values0), 1e-5);
// Prior on x0
hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
// Factor between x0-x1
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
// Check error for M(0) = 1
const HybridValues values1{continuousValues, modeOne};
double expectedError1 = 0;
for (const auto &factor : graph) expectedError1 += factor->error(values1);
EXPECT_DOUBLES_EQUAL(expectedError1, graph.error(values1), 1e-5);
// Hybrid factor P(x1|c1)
hfg.add(HybridGaussianFactor(m1, two::components(X(1))));
// Prior factor on c1
hfg.add(DecisionTreeFactor(m1, {2, 8}));
// Check errorTree
AlgebraicDecisionTree<Key> actualErrors = graph.errorTree(continuousValues);
// Create expected error tree
const AlgebraicDecisionTree<Key> expectedErrors(M(0), expectedError0,
expectedError1);
// Get a constrained ordering keeping c1 last
auto ordering_full = HybridOrdering(hfg);
// Check that the actual error tree matches the expected one
EXPECT(assert_equal(expectedErrors, actualErrors, 1e-5));
// Returns a Hybrid Bayes Tree with distribution P(x0|x1)P(x1|c1)P(c1)
HybridBayesTree::shared_ptr hbt = hfg.eliminateMultifrontal(ordering_full);
// Check probPrime
const double probPrime0 = graph.probPrime(values0);
EXPECT_DOUBLES_EQUAL(std::exp(-expectedError0), probPrime0, 1e-5);
EXPECT_LONGS_EQUAL(3, hbt->size());
}
const double probPrime1 = graph.probPrime(values1);
EXPECT_DOUBLES_EQUAL(std::exp(-expectedError1), probPrime1, 1e-5);
/* ************************************************************************* */
/*
* This test is about how to assemble the Bayes Tree roots after we do partial
* elimination
*/
TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
HybridGaussianFactorGraph hfg;
// Check discretePosterior
const AlgebraicDecisionTree<Key> graphPosterior =
graph.discretePosterior(continuousValues);
const double sum = probPrime0 + probPrime1;
const AlgebraicDecisionTree<Key> expectedPosterior(M(0), probPrime0 / sum,
probPrime1 / sum);
EXPECT(assert_equal(expectedPosterior, graphPosterior, 1e-5));
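Written out, the discrete posterior checked here normalizes the unnormalized probabilities over modes:

    P(m | x) = exp(−E(x, m)) / Σ_{m'} exp(−E(x, m')),

with E the total graph error, which is exactly what the expectedPosterior tree computes from probPrime0 and probPrime1.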
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1));
// Make the clique of factors connected to x0:
HybridGaussianFactorGraph factors_x0;
factors_x0.push_back(graph.at(0));
factors_x0.push_back(hgf);
hfg.add(HybridGaussianFactor({M(0), 2}, two::components(X(0))));
hfg.add(HybridGaussianFactor({M(1), 2}, two::components(X(2))));
// Test collectProductFactor
auto productFactor = factors_x0.collectProductFactor();
hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
// For M(0) = 0
auto [gaussianFactor0, actualScalar0] = productFactor(modeZero);
EXPECT(gaussianFactor0.size() == 2);
EXPECT_DOUBLES_EQUAL((*hgf)(modeZero).second, actualScalar0, 1e-5);
hfg.add(JacobianFactor(X(3), I_3x3, X(4), -I_3x3, Z_3x1));
hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1));
// For M(0) = 1
auto [gaussianFactor1, actualScalar1] = productFactor(modeOne);
EXPECT(gaussianFactor1.size() == 2);
EXPECT_DOUBLES_EQUAL((*hgf)(modeOne).second, actualScalar1, 1e-5);
hfg.add(HybridGaussianFactor({M(3), 2}, two::components(X(3))));
hfg.add(HybridGaussianFactor({M(2), 2}, two::components(X(5))));
// Test eliminate x0
const Ordering ordering{X(0)};
auto [conditional, factor] = factors_x0.eliminate(ordering);
auto ordering_full =
Ordering::ColamdConstrainedLast(hfg, {M(0), M(1), M(2), M(3)});
// Check the conditional
CHECK(conditional);
EXPECT(conditional->isHybrid());
auto p_x0_given_x1_m = conditional->asHybrid();
CHECK(p_x0_given_x1_m);
EXPECT(HybridGaussianConditional::CheckInvariants(*p_x0_given_x1_m, values1));
EXPECT_LONGS_EQUAL(1, p_x0_given_x1_m->nrFrontals()); // x0
EXPECT_LONGS_EQUAL(2, p_x0_given_x1_m->nrParents()); // x1, m0
const auto [hbt, remaining] = hfg.eliminatePartialMultifrontal(ordering_full);
// Check the remaining factor
EXPECT(factor);
EXPECT(std::dynamic_pointer_cast<HybridGaussianFactor>(factor));
auto phi_x1_m = std::dynamic_pointer_cast<HybridGaussianFactor>(factor);
EXPECT_LONGS_EQUAL(2, phi_x1_m->keys().size()); // x1, m0
// Check that the scalars incorporate the negative log constant of the
// conditional
EXPECT_DOUBLES_EQUAL(scalar0 - (*p_x0_given_x1_m)(modeZero)->negLogConstant(),
(*phi_x1_m)(modeZero).second, 1e-9);
EXPECT_DOUBLES_EQUAL(scalar1 - (*p_x0_given_x1_m)(modeOne)->negLogConstant(),
(*phi_x1_m)(modeOne).second, 1e-9);
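In equation form, these two checks assert the scalar bookkeeping of hybrid elimination: the remaining factor's scalar absorbs the conditional's normalization constant,

    s_φ(m) = s(m) − negLogConstant(P(x0 | x1, m)).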
// 9 cliques in the bayes tree and 0 remaining variables to eliminate.
EXPECT_LONGS_EQUAL(9, hbt->size());
EXPECT_LONGS_EQUAL(0, remaining->size());
// Check that the conditional and remaining factor are consistent for both
// modes
for (auto &&mode : {modeZero, modeOne}) {
const auto gc = (*p_x0_given_x1_m)(mode);
const auto [gf, scalar] = (*phi_x1_m)(mode);
/*
(Fan) Explanation: the Junction tree will need to re-eliminate to get to the
marginal on X(1), which is not possible because it involves eliminating
discrete before continuous. The solution to this, however, is in Murphy02.
TL;DR: this is 1. expensive and 2. inexact. Nevertheless, it is doable.
And I believe that we should do this.
*/
}
void dotPrint(const HybridGaussianFactorGraph::shared_ptr &hfg,
const HybridBayesTree::shared_ptr &hbt,
const Ordering &ordering) {
DotWriter dw;
dw.positionHints['c'] = 2;
dw.positionHints['x'] = 1;
std::cout << hfg->dot(DefaultKeyFormatter, dw);
std::cout << "\n";
hbt->dot(std::cout);
std::cout << "\n";
std::cout << hfg->eliminateSequential(ordering)->dot(DefaultKeyFormatter, dw);
}
/* ************************************************************************* */
// TODO(fan): make a graph like Varun's paper one
TEST(HybridGaussianFactorGraph, Switching) {
auto N = 12;
auto hfg = makeSwitchingChain(N);
// X(5) will be the center, X(1-4), X(6-9)
// X(3), X(7)
// X(2), X(8)
// X(1), X(4), X(6), X(9)
// M(5) will be the center, M(1-4), M(6-8)
// M(3), M(7)
// M(1), M(4), M(2), M(6), M(8)
// auto ordering_full =
// Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7),
// X(5),
// M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)});
KeyVector ordering;
{
std::vector<int> naturalX(N);
std::iota(naturalX.begin(), naturalX.end(), 1);
std::vector<Key> ordX;
std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX),
[](int x) { return X(x); });
auto [ndX, lvls] = makeBinaryOrdering(ordX);
std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering));
// TODO(dellaert): this has no effect!
for (auto &l : lvls) {
l = -l;
}
}
{
std::vector<int> naturalC(N - 1);
std::iota(naturalC.begin(), naturalC.end(), 1);
std::vector<Key> ordC;
std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC),
[](int x) { return M(x); });
// std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering));
const auto [ndC, lvls] = makeBinaryOrdering(ordC);
std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering));
}
auto ordering_full = Ordering(ordering);
// GTSAM_PRINT(*hfg);
// GTSAM_PRINT(ordering_full);
const auto [hbt, remaining] =
hfg->eliminatePartialMultifrontal(ordering_full);
// 12 cliques in the bayes tree and 0 remaining variables to eliminate.
EXPECT_LONGS_EQUAL(12, hbt->size());
EXPECT_LONGS_EQUAL(0, remaining->size());
}
/* ************************************************************************* */
// TODO(fan): make a graph like Varun's paper one
TEST(HybridGaussianFactorGraph, SwitchingISAM) {
auto N = 11;
auto hfg = makeSwitchingChain(N);
// X(5) will be the center, X(1-4), X(6-9)
// X(3), X(7)
// X(2), X(8)
// X(1), X(4), X(6), X(9)
// M(5) will be the center, M(1-4), M(6-8)
// M(3), M(7)
// M(1), M(4), M(2), M(6), M(8)
// auto ordering_full =
// Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7),
// X(5),
// M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)});
KeyVector ordering;
{
std::vector<int> naturalX(N);
std::iota(naturalX.begin(), naturalX.end(), 1);
std::vector<Key> ordX;
std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX),
[](int x) { return X(x); });
auto [ndX, lvls] = makeBinaryOrdering(ordX);
std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering));
// TODO(dellaert): this has no effect!
for (auto &l : lvls) {
l = -l;
}
}
{
std::vector<int> naturalC(N - 1);
std::iota(naturalC.begin(), naturalC.end(), 1);
std::vector<Key> ordC;
std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC),
[](int x) { return M(x); });
// std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering));
const auto [ndC, lvls] = makeBinaryOrdering(ordC);
std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering));
}
auto ordering_full = Ordering(ordering);
const auto [hbt, remaining] =
hfg->eliminatePartialMultifrontal(ordering_full);
auto new_fg = makeSwitchingChain(12);
auto isam = HybridGaussianISAM(*hbt);
// Run an ISAM update.
HybridGaussianFactorGraph factorGraph;
factorGraph.push_back(new_fg->at(new_fg->size() - 2));
factorGraph.push_back(new_fg->at(new_fg->size() - 1));
isam.update(factorGraph);
// ISAM should have 12 factors after the last update
EXPECT_LONGS_EQUAL(12, isam.size());
}
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, SwitchingTwoVar) {
const int N = 7;
auto hfg = makeSwitchingChain(N, X);
hfg->push_back(*makeSwitchingChain(N, Y, D));
for (int t = 1; t <= N; t++) {
hfg->add(JacobianFactor(X(t), I_3x3, Y(t), -I_3x3, Vector3(1.0, 0.0, 0.0)));
// The error of the original factors should equal the sum of errors of the
// conditional and remaining factor, modulo the normalization constant of
// the conditional.
double originalError = factors_x0.error({continuousValues, mode});
const double actualError = gc->negLogConstant() +
gc->error(continuousValues) +
gf->error(continuousValues) + scalar;
EXPECT_DOUBLES_EQUAL(originalError, actualError, 1e-9);
}
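Written out, the invariant the loop checks for each mode m is

    E_graph(x, m) = negLogConstant(gc_m) + E_gc(x; m) + E_gf(x; m) + s(m),

i.e. the conditional's error plus its normalization constant, plus the remaining factor's error and scalar, reproduce the original error.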
KeyVector ordering;
// Create a clique for x1
HybridGaussianFactorGraph factors_x1;
factors_x1.push_back(
factor); // Use the remaining factor from previous elimination
factors_x1.push_back(
graph.at(2)); // Add the factor for x1 from the original graph
KeyVector naturalX(N);
std::iota(naturalX.begin(), naturalX.end(), 1);
KeyVector ordX;
for (size_t i = 1; i <= N; i++) {
ordX.emplace_back(X(i));
ordX.emplace_back(Y(i));
}
// Test collectProductFactor for x1 clique
auto productFactor_x1 = factors_x1.collectProductFactor();
for (size_t i = 1; i <= N - 1; i++) {
ordX.emplace_back(M(i));
}
for (size_t i = 1; i <= N - 1; i++) {
ordX.emplace_back(D(i));
}
// For M(0) = 0
auto [gaussianFactor_x1_0, actualScalar_x1_0] = productFactor_x1(modeZero);
EXPECT_LONGS_EQUAL(2, gaussianFactor_x1_0.size());
// NOTE(Frank): prior on x1 does not contribute to the scalar
EXPECT_DOUBLES_EQUAL((*phi_x1_m)(modeZero).second, actualScalar_x1_0, 1e-5);
{
DotWriter dw;
dw.positionHints['x'] = 1;
dw.positionHints['c'] = 0;
dw.positionHints['d'] = 3;
dw.positionHints['y'] = 2;
// std::cout << hfg->dot(DefaultKeyFormatter, dw);
// std::cout << "\n";
}
// For M(0) = 1
auto [gaussianFactor_x1_1, actualScalar_x1_1] = productFactor_x1(modeOne);
EXPECT_LONGS_EQUAL(2, gaussianFactor_x1_1.size());
// NOTE(Frank): prior on x1 does not contribute to the scalar
EXPECT_DOUBLES_EQUAL((*phi_x1_m)(modeOne).second, actualScalar_x1_1, 1e-5);
{
DotWriter dw;
dw.positionHints['y'] = 9;
// dw.positionHints['c'] = 0;
// dw.positionHints['d'] = 3;
dw.positionHints['x'] = 1;
// std::cout << "\n";
// std::cout << hfg->eliminateSequential(Ordering(ordX))
// ->dot(DefaultKeyFormatter, dw);
// hfg->eliminateMultifrontal(Ordering(ordX))->dot(std::cout);
}
// Test eliminate for x1 clique
Ordering ordering_x1{X(1)};
auto [conditional_x1, factor_x1] = factors_x1.eliminate(ordering_x1);
Ordering ordering_partial;
for (size_t i = 1; i <= N; i++) {
ordering_partial.emplace_back(X(i));
ordering_partial.emplace_back(Y(i));
}
const auto [hbn, remaining] =
hfg->eliminatePartialSequential(ordering_partial);
// Check the conditional for x1
CHECK(conditional_x1);
EXPECT(conditional_x1->isHybrid());
auto p_x1_given_m = conditional_x1->asHybrid();
CHECK(p_x1_given_m);
EXPECT_LONGS_EQUAL(1, p_x1_given_m->nrFrontals()); // x1
EXPECT_LONGS_EQUAL(1, p_x1_given_m->nrParents()); // m0
EXPECT_LONGS_EQUAL(14, hbn->size());
EXPECT_LONGS_EQUAL(11, remaining->size());
// Check the remaining factor for x1
CHECK(factor_x1);
auto phi_x1 = std::dynamic_pointer_cast<DecisionTreeFactor>(factor_x1);
CHECK(phi_x1);
EXPECT_LONGS_EQUAL(1, phi_x1->keys().size()); // m0
// We can't really check the error of the decision tree factor phi_x1, because
// the continuous factor whose error(kEmpty) we need is not available.
{
DotWriter dw;
dw.positionHints['x'] = 1;
dw.positionHints['c'] = 0;
dw.positionHints['d'] = 3;
dw.positionHints['y'] = 2;
// std::cout << remaining->dot(DefaultKeyFormatter, dw);
// std::cout << "\n";
}
// Now test full elimination of the graph:
auto hybridBayesNet = graph.eliminateSequential();
CHECK(hybridBayesNet);
// Check that the posterior P(M|X=continuousValues) from the Bayes net is the
// same as the posterior from the graph. This is a sanity check that the
// elimination is done correctly.
AlgebraicDecisionTree<Key> bnPosterior =
hybridBayesNet->discretePosterior(continuousValues);
EXPECT(assert_equal(graphPosterior, bnPosterior));
}
/* ****************************************************************************/
// Test subset of API for switching network with 3 states.
// None of these are regression tests.
TEST(HybridGaussianFactorGraph, ErrorAndProbPrime) {
// Create switching network with three continuous variables and two discrete:
// ϕ(x0) ϕ(x0,x1,m0) ϕ(x1,x2,m1) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0) ϕ(m0,m1)
Switching s(3);
// Check size of linearized factor graph
const HybridGaussianFactorGraph &graph = s.linearizedFactorGraph;
EXPECT_LONGS_EQUAL(7, graph.size());
// Eliminate the graph
const HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential();
const HybridValues delta = hybridBayesNet->optimize();
const double error = graph.error(delta);
// Check that the probability prime is the exponential of the error
EXPECT(assert_equal(graph.probPrime(delta), exp(-error), 1e-7));
// Check that the posterior P(M|X=continuousValues) from the Bayes net is the
// same as the posterior from the graph. This is a sanity check that the
// elimination is done correctly.
const AlgebraicDecisionTree<Key> graphPosterior =
graph.discretePosterior(delta.continuous());
const AlgebraicDecisionTree<Key> bnPosterior =
hybridBayesNet->discretePosterior(delta.continuous());
EXPECT(assert_equal(graphPosterior, bnPosterior));
}
/* ************************************************************************* */
// Select a particular continuous factor graph given a discrete assignment
TEST(HybridGaussianFactorGraph, DiscreteSelection) {
Switching s(3);
@@ -546,23 +449,43 @@ TEST(HybridGaussianFactorGraph, optimize) {
// Test adding of gaussian conditional and re-elimination.
TEST(HybridGaussianFactorGraph, Conditionals) {
Switching switching(4);
HybridGaussianFactorGraph hfg;
hfg.push_back(switching.linearizedFactorGraph.at(0)); // P(X1)
HybridGaussianFactorGraph hfg;
hfg.push_back(switching.linearizedFactorGraph.at(0)); // P(X0)
Ordering ordering;
ordering.push_back(X(0));
HybridBayesNet::shared_ptr bayes_net = hfg.eliminateSequential(ordering);
hfg.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1)
hfg.push_back(*bayes_net);
hfg.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2)
hfg.push_back(switching.linearizedFactorGraph.at(5)); // P(M1)
ordering.push_back(X(1));
ordering.push_back(X(2));
ordering.push_back(M(0));
ordering.push_back(M(1));
HybridGaussianFactorGraph hfg2;
hfg2.push_back(*bayes_net); // P(X0)
hfg2.push_back(switching.linearizedFactorGraph.at(1)); // P(X0, X1 | M0)
hfg2.push_back(switching.linearizedFactorGraph.at(2)); // P(X1, X2 | M1)
hfg2.push_back(switching.linearizedFactorGraph.at(5)); // P(M1)
ordering += X(1), X(2), M(0), M(1);
bayes_net = hfg.eliminateSequential(ordering);
// Create the product of the first two factors and check eliminate:
HybridGaussianFactorGraph fragment;
fragment.push_back(hfg2[0]);
fragment.push_back(hfg2[1]);
// Check the product:
HybridGaussianProductFactor product = fragment.collectProductFactor();
auto leaf = fragment(DiscreteValues{{M(0), 0}});
EXPECT_LONGS_EQUAL(2, leaf.size());
// Check product and that removeEmpty does not touch it
auto pruned = product.removeEmpty();
LONGS_EQUAL(2, pruned.nrLeaves());
// Test eliminate
auto [hybridConditional, factor] = fragment.eliminate({X(0)});
EXPECT(hybridConditional->isHybrid());
EXPECT(hybridConditional->keys() == KeyVector({X(0), X(1), M(0)}));
EXPECT(dynamic_pointer_cast<HybridGaussianFactor>(factor));
EXPECT(factor->keys() == KeyVector({X(1), M(0)}));
bayes_net = hfg2.eliminateSequential(ordering);
HybridValues result = bayes_net->optimize();
@@ -581,51 +504,6 @@ TEST(HybridGaussianFactorGraph, Conditionals) {
EXPECT(assert_equal(expected_discrete, result.discrete()));
}
/* ****************************************************************************/
// Test hybrid gaussian factor graph error and unnormalized probabilities
TEST(HybridGaussianFactorGraph, ErrorAndProbPrime) {
Switching s(3);
HybridGaussianFactorGraph graph = s.linearizedFactorGraph;
HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential();
const HybridValues delta = hybridBayesNet->optimize();
const double error = graph.error(delta);
// regression
EXPECT(assert_equal(1.58886, error, 1e-5));
// Real test:
EXPECT(assert_equal(graph.probPrime(delta), exp(-error), 1e-7));
}
/* ****************************************************************************/
// Test hybrid gaussian factor graph error and unnormalized probabilities
TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) {
// Create switching network with three continuous variables and two discrete:
// ϕ(x0) ϕ(x0,x1,m0) ϕ(x1,x2,m1) ϕ(x0;z0) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0) ϕ(m0,m1)
Switching s(3);
const HybridGaussianFactorGraph &graph = s.linearizedFactorGraph;
const HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential();
const HybridValues delta = hybridBayesNet->optimize();
// regression test for errorTree
std::vector<double> leaves = {2.7916153, 1.5888555, 1.7233422, 1.6191947};
AlgebraicDecisionTree<Key> expectedErrors(s.modes, leaves);
const auto error_tree = graph.errorTree(delta.continuous());
EXPECT(assert_equal(expectedErrors, error_tree, 1e-7));
// regression test for discretePosterior
const AlgebraicDecisionTree<Key> expectedPosterior(
s.modes, std::vector{0.095516068, 0.31800092, 0.27798511, 0.3084979});
auto posterior = graph.discretePosterior(delta.continuous());
EXPECT(assert_equal(expectedPosterior, posterior, 1e-7));
}
/* ****************************************************************************/
// Test hybrid gaussian factor graph errorTree during incremental operation
TEST(HybridGaussianFactorGraph, IncrementalErrorTree) {
@@ -643,15 +521,13 @@ TEST(HybridGaussianFactorGraph, IncrementalErrorTree) {
HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential();
EXPECT_LONGS_EQUAL(5, hybridBayesNet->size());
// Check discrete posterior at optimum
HybridValues delta = hybridBayesNet->optimize();
auto error_tree = graph.errorTree(delta.continuous());
std::vector<DiscreteKey> discrete_keys = {{M(0), 2}, {M(1), 2}};
std::vector<double> leaves = {2.7916153, 1.5888555, 1.7233422, 1.6191947};
AlgebraicDecisionTree<Key> expected_error(discrete_keys, leaves);
// regression
EXPECT(assert_equal(expected_error, error_tree, 1e-7));
AlgebraicDecisionTree<Key> graphPosterior =
graph.discretePosterior(delta.continuous());
AlgebraicDecisionTree<Key> bnPosterior =
hybridBayesNet->discretePosterior(delta.continuous());
EXPECT(assert_equal(graphPosterior, bnPosterior));
graph = HybridGaussianFactorGraph();
graph.push_back(*hybridBayesNet);
@@ -662,26 +538,21 @@ TEST(HybridGaussianFactorGraph, IncrementalErrorTree) {
EXPECT_LONGS_EQUAL(7, hybridBayesNet->size());
delta = hybridBayesNet->optimize();
auto error_tree2 = graph.errorTree(delta.continuous());
// regression
leaves = {0.50985198, 0.0097577296, 0.50009425, 0,
0.52922138, 0.029127133, 0.50985105, 0.0097567964};
AlgebraicDecisionTree<Key> expected_error2(s.modes, leaves);
EXPECT(assert_equal(expected_error, error_tree, 1e-7));
graphPosterior = graph.discretePosterior(delta.continuous());
bnPosterior = hybridBayesNet->discretePosterior(delta.continuous());
EXPECT(assert_equal(graphPosterior, bnPosterior));
}
/* ****************************************************************************/
// Check that assembleGraphTree assembles Gaussian factor graphs for each
// assignment.
TEST(HybridGaussianFactorGraph, assembleGraphTree) {
// Check that collectProductFactor works correctly.
TEST(HybridGaussianFactorGraph, collectProductFactor) {
const int num_measurements = 1;
auto fg = tiny::createHybridGaussianFactorGraph(
num_measurements, VectorValues{{Z(0), Vector1(5.0)}});
VectorValues vv{{Z(0), Vector1(5.0)}};
auto fg = tiny::createHybridGaussianFactorGraph(num_measurements, vv);
EXPECT_LONGS_EQUAL(3, fg.size());
// Assemble graph tree:
auto actual = fg.assembleGraphTree();
auto actual = fg.collectProductFactor();
// Create expected decision tree with two factor graphs:
@@ -700,13 +571,15 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) {
DiscreteValues d0{{M(0), 0}}, d1{{M(0), 1}};
// Expected decision tree with two factor graphs:
// f(x0;mode=0)P(x0) and f(x0;mode=1)P(x0)
GaussianFactorGraphTree expected{
M(0), GaussianFactorGraph(std::vector<GF>{(*hybrid)(d0), prior}),
GaussianFactorGraph(std::vector<GF>{(*hybrid)(d1), prior})};
// f(x0;mode=0)P(x0)
GaussianFactorGraph expectedFG0{(*hybrid)(d0).first, prior};
EXPECT(assert_equal(expectedFG0, actual(d0).first, 1e-5));
EXPECT(assert_equal(0.0, actual(d0).second, 1e-5));
EXPECT(assert_equal(expected(d0), actual(d0), 1e-5));
EXPECT(assert_equal(expected(d1), actual(d1), 1e-5));
// f(x0;mode=1)P(x0)
GaussianFactorGraph expectedFG1{(*hybrid)(d1).first, prior};
EXPECT(assert_equal(expectedFG1, actual(d1).first, 1e-5));
EXPECT(assert_equal(1.79176, actual(d1).second, 1e-5));
}
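So collectProductFactor maps each assignment m to a (GaussianFactorGraph, scalar) pair: the graph gathers the Gaussian factors selected by m, and the scalar sums the constants the hybrid factors carry; mode 0 carries none, while mode 1 carries 1.79176, the same ln 6 seen in the Tiny test earlier.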
/* ****************************************************************************/
@@ -746,7 +619,6 @@ bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements,
// Test ratios for a number of independent samples:
for (size_t i = 0; i < num_samples; i++) {
HybridValues sample = bn.sample(&kRng);
// GTSAM_PRINT(sample);
// std::cout << "ratio: " << compute_ratio(&sample) << std::endl;
if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6) return false;
}

View File

@@ -0,0 +1,196 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file testHybridGaussianProductFactor.cpp
* @brief Unit tests for HybridGaussianProductFactor
* @author Frank Dellaert
* @date October 2024
*/
#include <gtsam/base/Testable.h>
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianProductFactor.h>
#include <gtsam/inference/Key.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/JacobianFactor.h>
// Include for test suite
#include <CppUnitLite/TestHarness.h>
#include <memory>
using namespace std;
using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;
/* ************************************************************************* */
namespace examples {
static const DiscreteKey m1(M(1), 2), m2(M(2), 3);
const auto A1 = Matrix::Zero(2, 1);
const auto A2 = Matrix::Zero(2, 2);
const auto b = Matrix::Zero(2, 1);
const auto f10 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
const auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
const HybridGaussianFactor hybridFactorA(m1, {{f10, 10}, {f11, 11}});
const auto A3 = Matrix::Zero(2, 3);
const auto f20 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
const auto f21 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
const auto f22 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
const HybridGaussianFactor hybridFactorB(m2, {{f20, 20}, {f21, 21}, {f22, 22}});
// Simulate a pruned hybrid factor, in this case m2==1 is nulled out.
const HybridGaussianFactor prunedFactorB(
m2, {{f20, 20}, {nullptr, 1000}, {f22, 22}});
} // namespace examples
/* ************************************************************************* */
// Constructor
TEST(HybridGaussianProductFactor, Construct) {
HybridGaussianProductFactor product;
}
/* ************************************************************************* */
// Add two Gaussian factors and check only one leaf in tree
TEST(HybridGaussianProductFactor, AddTwoGaussianFactors) {
using namespace examples;
HybridGaussianProductFactor product;
product += f10;
product += f11;
// Check that the product has only one leaf and no discrete variables.
EXPECT_LONGS_EQUAL(1, product.nrLeaves());
EXPECT(product.labels().empty());
// Retrieve the single leaf
auto leaf = product(Assignment<Key>());
// Check that the leaf contains both factors
EXPECT_LONGS_EQUAL(2, leaf.first.size());
EXPECT(leaf.first.at(0) == f10);
EXPECT(leaf.first.at(1) == f11);
EXPECT_DOUBLES_EQUAL(0, leaf.second, 1e-9);
}
/* ************************************************************************* */
// Add two GaussianConditionals and check the resulting tree
TEST(HybridGaussianProductFactor, AddTwoGaussianConditionals) {
// Create two GaussianConditionals
Vector1 d(1.0);
Matrix11 R = I_1x1, S = I_1x1;
auto gc1 = std::make_shared<GaussianConditional>(X(1), d, R, X(2), S);
auto gc2 = std::make_shared<GaussianConditional>(X(2), d, R);
// Create a HybridGaussianProductFactor and add the conditionals
HybridGaussianProductFactor product;
product += std::static_pointer_cast<GaussianFactor>(gc1);
product += std::static_pointer_cast<GaussianFactor>(gc2);
// Check that the product has only one leaf and no discrete variables
EXPECT_LONGS_EQUAL(1, product.nrLeaves());
EXPECT(product.labels().empty());
// Retrieve the single leaf
auto leaf = product(Assignment<Key>());
// Check that the leaf contains both conditionals
EXPECT_LONGS_EQUAL(2, leaf.first.size());
EXPECT(leaf.first.at(0) == gc1);
EXPECT(leaf.first.at(1) == gc2);
EXPECT_DOUBLES_EQUAL(0, leaf.second, 1e-9);
}
/* ************************************************************************* */
// Check AsProductFactor
TEST(HybridGaussianProductFactor, AsProductFactor) {
using namespace examples;
auto product = hybridFactorA.asProductFactor();
// Let's check that this worked:
Assignment<Key> mode;
mode[m1.first] = 0;
auto actual = product(mode);
EXPECT(actual.first.at(0) == f10);
EXPECT_DOUBLES_EQUAL(10, actual.second, 1e-9);
// TODO(Frank): once hiding is killed, f11 should also be there
}
/* ************************************************************************* */
// "Add" one hybrid factors together.
TEST(HybridGaussianProductFactor, AddOne) {
using namespace examples;
HybridGaussianProductFactor product;
product += hybridFactorA;
// Let's check that this worked:
Assignment<Key> mode;
mode[m1.first] = 0;
auto actual = product(mode);
EXPECT(actual.first.at(0) == f10);
EXPECT_DOUBLES_EQUAL(10, actual.second, 1e-9);
// TODO(Frank): once hiding is killed, f11 should also be there
}
/* ************************************************************************* */
// "Add" two HFG together.
TEST(HybridGaussianProductFactor, AddTwo) {
using namespace examples;
// Create product of two hybrid factors: it will be a decision tree now on
// both discrete variables m1 and m2:
HybridGaussianProductFactor product;
product += hybridFactorA;
product += hybridFactorB;
// Let's check that this worked:
auto actual00 = product({{M(1), 0}, {M(2), 0}});
EXPECT(actual00.first.at(0) == f10);
EXPECT(actual00.first.at(1) == f20);
EXPECT_DOUBLES_EQUAL(10 + 20, actual00.second, 1e-9);
auto actual12 = product({{M(1), 1}, {M(2), 2}});
// TODO(Frank): once hiding is killed, these should also be equal:
// EXPECT(actual12.first.at(0) == f11);
// EXPECT(actual12.first.at(1) == f22);
EXPECT_DOUBLES_EQUAL(11 + 22, actual12.second, 1e-9);
}
/* ************************************************************************* */
// "Add" two HFG together.
TEST(HybridGaussianProductFactor, AddPruned) {
using namespace examples;
// Create product of two hybrid factors: it will be a decision tree now on
// both discrete variables m1 and m2:
HybridGaussianProductFactor product;
product += hybridFactorA;
product += prunedFactorB;
EXPECT_LONGS_EQUAL(6, product.nrLeaves());
auto pruned = product.removeEmpty();
EXPECT_LONGS_EQUAL(5, pruned.nrLeaves());
}
/* ************************************************************************* */
int main() {
TestResult tr;
return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */

View File

@@ -192,24 +192,29 @@ TEST(HybridGaussianFactor, TwoStateModel2) {
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
// Check that ratio of Bayes net and factor graph for different modes is
// equal for several values of {x0,x1}.
HybridBayesNet::shared_ptr eliminated = gfg.eliminateSequential();
for (VectorValues vv :
{VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
vv.insert(given); // add measurements for HBN
HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9);
}
const auto& expectedDiscretePosterior = hbn.discretePosterior(vv);
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Equality of posteriors asserts that the factor graph is correct (same
// ratios for all modes)
EXPECT(
assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv)));
// This one asserts that HBN resulting from elimination is correct.
EXPECT(assert_equal(expectedDiscretePosterior,
eliminated->discretePosterior(vv)));
}
// Importance sampling run with 100k samples gives 50.095/49.905
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
// Since there is no measurement on x1, we have a 50/50 probability
auto p_m = bn->at(2)->asDiscrete();
auto p_m = eliminated->at(2)->asDiscrete();
EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 0}}), 1e-9);
EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 1}}), 1e-9);
}
@@ -221,6 +226,7 @@ TEST(HybridGaussianFactor, TwoStateModel2) {
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
HybridBayesNet::shared_ptr eliminated = gfg.eliminateSequential();
// Check that ratio of Bayes net and factor graph for different modes is
// equal for several values of {x0,x1}.
@@ -228,17 +234,22 @@ TEST(HybridGaussianFactor, TwoStateModel2) {
{VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
vv.insert(given); // add measurements for HBN
HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9);
}
const auto& expectedDiscretePosterior = hbn.discretePosterior(vv);
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Equality of posteriors asserts that the factor graph is correct (same
// ratios for all modes)
EXPECT(
assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv)));
// This one asserts that HBN resulting from elimination is correct.
EXPECT(assert_equal(expectedDiscretePosterior,
eliminated->discretePosterior(vv)));
}
// Values taken from an importance sampling run with 100k samples:
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
DiscreteConditional expected(m1, "48.3158/51.6842");
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
EXPECT(assert_equal(expected, *(eliminated->at(2)->asDiscrete()), 0.002));
}
{

View File

@@ -545,14 +545,17 @@ TEST(HybridNonlinearFactorGraph, Printing) {
#ifdef GTSAM_DT_MERGING
string expected_hybridFactorGraph = R"(
size: 7
factor 0:
Factor 0
GaussianFactor:
A[x0] = [
10
]
b = [ -10 ]
No noise model
factor 1:
HybridGaussianFactor
Factor 1
HybridGaussianFactor:
Hybrid [x0 x1; m0]{
Choice(m0)
0 Leaf :
@@ -564,6 +567,7 @@ Hybrid [x0 x1; m0]{
]
b = [ -1 ]
No noise model
scalar: 0.918939
1 Leaf :
A[x0] = [
@@ -574,10 +578,12 @@ Hybrid [x0 x1; m0]{
]
b = [ -0 ]
No noise model
scalar: 0.918939
}
factor 2:
HybridGaussianFactor
Factor 2
HybridGaussianFactor:
Hybrid [x1 x2; m1]{
Choice(m1)
0 Leaf :
@@ -589,6 +595,7 @@ Hybrid [x1 x2; m1]{
]
b = [ -1 ]
No noise model
scalar: 0.918939
1 Leaf :
A[x1] = [
@@ -599,24 +606,37 @@ Hybrid [x1 x2; m1]{
]
b = [ -0 ]
No noise model
scalar: 0.918939
}
factor 3:
Factor 3
GaussianFactor:
A[x1] = [
10
]
b = [ -10 ]
No noise model
factor 4:
Factor 4
GaussianFactor:
A[x2] = [
10
]
b = [ -10 ]
No noise model
factor 5: P( m0 ):
Factor 5
DiscreteFactor:
P( m0 ):
Leaf 0.5
factor 6: P( m1 | m0 ):
Factor 6
DiscreteFactor:
P( m1 | m0 ):
Choice(m1)
0 Choice(m0)
0 0 Leaf 0.33333333
@@ -625,6 +645,7 @@ factor 6: P( m1 | m0 ):
1 0 Leaf 0.66666667
1 1 Leaf 0.4
)";
#else
string expected_hybridFactorGraph = R"(
@@ -717,7 +738,7 @@ factor 6: P( m1 | m0 ):
// Expected output for hybridBayesNet.
string expected_hybridBayesNet = R"(
size: 3
conditional 0: Hybrid P( x0 | x1 m0)
conditional 0: P( x0 | x1 m0)
Discrete Keys = (m0, 2),
logNormalizationConstant: 1.38862
@@ -736,7 +757,7 @@ conditional 0: Hybrid P( x0 | x1 m0)
logNormalizationConstant: 1.38862
No noise model
conditional 1: Hybrid P( x1 | x2 m0 m1)
conditional 1: P( x1 | x2 m0 m1)
Discrete Keys = (m0, 2), (m1, 2),
logNormalizationConstant: 1.3935
@@ -771,7 +792,7 @@ conditional 1: Hybrid P( x1 | x2 m0 m1)
logNormalizationConstant: 1.3935
No noise model
conditional 2: Hybrid P( x2 | m0 m1)
conditional 2: P( x2 | m0 m1)
Discrete Keys = (m0, 2), (m1, 2),
logNormalizationConstant: 1.38857

View File

@@ -52,13 +52,18 @@ BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_AlgebraicDecisionTree_Leaf");
BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_AlgebraicDecisionTree_Choice")
BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor, "gtsam_HybridGaussianFactor");
BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors,
BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::FactorValuePairs,
"gtsam_HybridGaussianFactor_Factors");
BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Leaf,
BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::FactorValuePairs::Leaf,
"gtsam_HybridGaussianFactor_Factors_Leaf");
BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Choice,
BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::FactorValuePairs::Choice,
"gtsam_HybridGaussianFactor_Factors_Choice");
BOOST_CLASS_EXPORT_GUID(GaussianFactorGraphValuePair,
"gtsam_GaussianFactorGraphValuePair");
BOOST_CLASS_EXPORT_GUID(HybridGaussianProductFactor,
"gtsam_HybridGaussianProductFactor");
BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional,
"gtsam_HybridGaussianConditional");
BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals,