Merge pull request #1831 from borglab/hybrid-error-scalars
commit f7b5f3c22c

@@ -55,23 +55,14 @@ HybridGaussianConditional::conditionals() const {
   return conditionals_;
 }
 
-/* *******************************************************************************/
-HybridGaussianConditional::HybridGaussianConditional(
-    KeyVector &&continuousFrontals, KeyVector &&continuousParents,
-    DiscreteKeys &&discreteParents,
-    std::vector<GaussianConditional::shared_ptr> &&conditionals)
-    : HybridGaussianConditional(continuousFrontals, continuousParents,
-                                discreteParents,
-                                Conditionals(discreteParents, conditionals)) {}
-
 /* *******************************************************************************/
 HybridGaussianConditional::HybridGaussianConditional(
     const KeyVector &continuousFrontals, const KeyVector &continuousParents,
-    const DiscreteKeys &discreteParents,
+    const DiscreteKey &discreteParent,
    const std::vector<GaussianConditional::shared_ptr> &conditionals)
     : HybridGaussianConditional(continuousFrontals, continuousParents,
-                                discreteParents,
-                                Conditionals(discreteParents, conditionals)) {}
+                                DiscreteKeys{discreteParent},
+                                Conditionals({discreteParent}, conditionals)) {}
 
 /* *******************************************************************************/
 // TODO(dellaert): This is copy/paste: HybridGaussianConditional should be
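Editor's note: with the rvalue-reference overload removed above, the single-discrete-parent overload is the convenient entry point. A minimal usage sketch — not part of the diff; it assumes the usual gtsam symbol shorthands and GTSAM's I_1x1/Z_1x1 constants:

    #include <gtsam/hybrid/HybridGaussianConditional.h>
    #include <gtsam/inference/Symbol.h>

    using namespace gtsam;
    using symbol_shorthand::M;
    using symbol_shorthand::X;
    using symbol_shorthand::Z;

    // Two components p(z0 | x0, m0), one per assignment of the binary mode.
    HybridGaussianConditional::shared_ptr makeMixture() {
      const DiscreteKey mode(M(0), 2);
      std::vector<GaussianConditional::shared_ptr> conditionals{
          GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 0.5),
          GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3.0)};
      // Delegates to the DiscreteKeys-based constructor, as the hunk above shows.
      return std::make_shared<HybridGaussianConditional>(
          KeyVector{Z(0)}, KeyVector{X(0)}, mode, conditionals);
    }
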
@@ -219,23 +210,20 @@ std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(
 
   const DiscreteKeys discreteParentKeys = discreteKeys();
   const KeyVector continuousParentKeys = continuousParents();
-  const HybridGaussianFactor::Factors likelihoods(
-      conditionals_, [&](const GaussianConditional::shared_ptr &conditional) {
+  const HybridGaussianFactor::FactorValuePairs likelihoods(
+      conditionals_,
+      [&](const GaussianConditional::shared_ptr &conditional)
+          -> GaussianFactorValuePair {
         const auto likelihood_m = conditional->likelihood(given);
         const double Cgm_Kgcm =
             logConstant_ - conditional->logNormalizationConstant();
         if (Cgm_Kgcm == 0.0) {
-          return likelihood_m;
+          return {likelihood_m, 0.0};
         } else {
-          // Add a constant factor to the likelihood in case the noise models
+          // Add a constant to the likelihood in case the noise models
           // are not all equal.
-          GaussianFactorGraph gfg;
-          gfg.push_back(likelihood_m);
-          Vector c(1);
-          c << std::sqrt(2.0 * Cgm_Kgcm);
-          auto constantFactor = std::make_shared<JacobianFactor>(c);
-          gfg.push_back(constantFactor);
-          return std::make_shared<JacobianFactor>(gfg);
+          double c = 2.0 * Cgm_Kgcm;
+          return {likelihood_m, c};
         }
       });
   return std::make_shared<HybridGaussianFactor>(
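Editor's note: the scalar stored in each pair replaces the extra Jacobian row the old code appended explicitly. Under the convention used throughout this PR, a pair value v contributes v/2 to the component's error, so the bookkeeping is unchanged; with C denoting Cgm_Kgcm:

    \frac{1}{2}\bigl(\sqrt{2C}\bigr)^{2} \;=\; C \;=\; \frac{1}{2}\,v,
    \qquad v = 2C .
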
@@ -107,29 +107,18 @@ class GTSAM_EXPORT HybridGaussianConditional
       const Conditionals &conditionals);
 
   /**
-   * @brief Make a Gaussian Mixture from a list of Gaussian conditionals
+   * @brief Make a Gaussian Mixture from a vector of Gaussian conditionals.
+   * The DecisionTree-based constructor is preferred over this one.
    *
    * @param continuousFrontals The continuous frontal variables
    * @param continuousParents The continuous parent variables
-   * @param discreteParents Discrete parents variables
-   * @param conditionals List of conditionals
-   */
-  HybridGaussianConditional(
-      KeyVector &&continuousFrontals, KeyVector &&continuousParents,
-      DiscreteKeys &&discreteParents,
-      std::vector<GaussianConditional::shared_ptr> &&conditionals);
-
-  /**
-   * @brief Make a Gaussian Mixture from a list of Gaussian conditionals
-   *
-   * @param continuousFrontals The continuous frontal variables
-   * @param continuousParents The continuous parent variables
-   * @param discreteParents Discrete parents variables
-   * @param conditionals List of conditionals
+   * @param discreteParent Single discrete parent variable
+   * @param conditionals Vector of conditionals with the same size as the
+   * cardinality of the discrete parent.
    */
   HybridGaussianConditional(
       const KeyVector &continuousFrontals, const KeyVector &continuousParents,
-      const DiscreteKeys &discreteParents,
+      const DiscreteKey &discreteParent,
       const std::vector<GaussianConditional::shared_ptr> &conditionals);
 
   /// @}
@@ -28,11 +28,56 @@
 
 namespace gtsam {
 
+/**
+ * @brief Helper function to augment the [A|b] matrices in the factor
+ * components with the normalizer values.
+ * This is done by storing the normalizer value in
+ * the `b` vector as an additional row.
+ *
+ * @param factors DecisionTree of GaussianFactors and arbitrary scalars,
+ * one scalar per Gaussian factor in factors.
+ * @return HybridGaussianFactor::Factors
+ */
+HybridGaussianFactor::Factors augment(
+    const HybridGaussianFactor::FactorValuePairs &factors) {
+  // Find the minimum value so we can "proselytize" to positive values.
+  // Done because we can't have sqrt of negative numbers.
+  HybridGaussianFactor::Factors gaussianFactors;
+  AlgebraicDecisionTree<Key> valueTree;
+  std::tie(gaussianFactors, valueTree) = unzip(factors);
+
+  // Normalize
+  double min_value = valueTree.min();
+  AlgebraicDecisionTree<Key> values =
+      valueTree.apply([&min_value](double n) { return n - min_value; });
+
+  // Finally, update the [A|b] matrices.
+  auto update = [&values](const Assignment<Key> &assignment,
+                          const HybridGaussianFactor::sharedFactor &gf) {
+    auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
+    if (!jf) return gf;
+    // If the log_normalizer is 0, do nothing
+    if (values(assignment) == 0.0) return gf;
+
+    GaussianFactorGraph gfg;
+    gfg.push_back(jf);
+
+    Vector c(1);
+    c << std::sqrt(values(assignment));
+    auto constantFactor = std::make_shared<JacobianFactor>(c);
+
+    gfg.push_back(constantFactor);
+    return std::dynamic_pointer_cast<GaussianFactor>(
+        std::make_shared<JacobianFactor>(gfg));
+  };
+  return gaussianFactors.apply(update);
+}
+
 /* *******************************************************************************/
 HybridGaussianFactor::HybridGaussianFactor(const KeyVector &continuousKeys,
                                            const DiscreteKeys &discreteKeys,
-                                           const Factors &factors)
-    : Base(continuousKeys, discreteKeys), factors_(factors) {}
+                                           const FactorValuePairs &factors)
+    : Base(continuousKeys, discreteKeys), factors_(augment(factors)) {}
 
 /* *******************************************************************************/
 bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const {
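Editor's note on augment() above: each leaf pair (f_m, v_m) is shifted by the tree-wide minimum so the square root stays real, and the shifted value is folded into the [A|b] system as one extra row. The effect on the component error is

    \tilde v_m = v_m - \min_k v_k \;\ge\; 0, \qquad
    E_m(x) = \tfrac{1}{2}\lVert A_m x - b_m\rVert^{2}
             + \tfrac{1}{2}\bigl(\sqrt{\tilde v_m}\bigr)^{2}
           = \tfrac{1}{2}\lVert A_m x - b_m\rVert^{2} + \tfrac{1}{2}\tilde v_m .

Subtracting the minimum shifts every component's error by the same constant, so the relative weights of the discrete modes are unaffected.
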
@@ -33,6 +33,9 @@ class HybridValues;
 class DiscreteValues;
 class VectorValues;
 
+/// Alias for pair of GaussianFactor::shared_pointer and a double value.
+using GaussianFactorValuePair = std::pair<GaussianFactor::shared_ptr, double>;
+
 /**
  * @brief Implementation of a discrete conditional mixture factor.
  * Implements a joint discrete-continuous factor where the discrete variable
@@ -52,7 +55,9 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
 
   using sharedFactor = std::shared_ptr<GaussianFactor>;
 
-  /// typedef for Decision Tree of Gaussian factors and log-constant.
+  /// typedef for Decision Tree of Gaussian factors and arbitrary value.
+  using FactorValuePairs = DecisionTree<Key, GaussianFactorValuePair>;
+  /// typedef for Decision Tree of Gaussian factors.
   using Factors = DecisionTree<Key, sharedFactor>;
 
  private:
@@ -80,26 +85,26 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
    * @param continuousKeys A vector of keys representing continuous variables.
    * @param discreteKeys A vector of keys representing discrete variables and
    * their cardinalities.
-   * @param factors The decision tree of Gaussian factors stored
-   * as the mixture density.
+   * @param factors The decision tree of Gaussian factors and arbitrary scalars.
    */
   HybridGaussianFactor(const KeyVector &continuousKeys,
                        const DiscreteKeys &discreteKeys,
-                       const Factors &factors);
+                       const FactorValuePairs &factors);
 
   /**
    * @brief Construct a new HybridGaussianFactor object using a vector of
    * GaussianFactor shared pointers.
    *
    * @param continuousKeys Vector of keys for continuous factors.
-   * @param discreteKeys Vector of discrete keys.
-   * @param factors Vector of gaussian factor shared pointers.
+   * @param discreteKey The discrete key to index each component.
+   * @param factors Vector of gaussian factor shared pointers
+   * and arbitrary scalars. Same size as the cardinality of discreteKey.
    */
   HybridGaussianFactor(const KeyVector &continuousKeys,
-                       const DiscreteKeys &discreteKeys,
-                       const std::vector<sharedFactor> &factors)
-      : HybridGaussianFactor(continuousKeys, discreteKeys,
-                             Factors(discreteKeys, factors)) {}
+                       const DiscreteKey &discreteKey,
+                       const std::vector<GaussianFactorValuePair> &factors)
+      : HybridGaussianFactor(continuousKeys, {discreteKey},
+                             FactorValuePairs({discreteKey}, factors)) {}
 
   /// @}
   /// @name Testable
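Editor's note: a sketch of the new vector-of-pairs constructor — not part of the diff; the zero scalars mirror the updated tests later in this commit, and a scalar of 0.0 leaves a component untouched by augment():

    #include <gtsam/hybrid/HybridGaussianFactor.h>
    #include <gtsam/inference/Symbol.h>
    #include <gtsam/linear/JacobianFactor.h>

    using namespace gtsam;
    using symbol_shorthand::M;
    using symbol_shorthand::X;

    HybridGaussianFactor makeHybridFactor() {
      const DiscreteKey mode(M(1), 2);
      // One (factor, scalar) pair per assignment of the binary mode.
      std::vector<GaussianFactorValuePair> components{
          {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
          {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0}};
      return HybridGaussianFactor({X(1)}, mode, components);
    }
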
@@ -92,13 +92,13 @@ void HybridGaussianFactorGraph::printErrors(
     // Clear the stringstream
     ss.str(std::string());
 
-    if (auto gmf = std::dynamic_pointer_cast<HybridGaussianFactor>(factor)) {
+    if (auto hgf = std::dynamic_pointer_cast<HybridGaussianFactor>(factor)) {
       if (factor == nullptr) {
         std::cout << "nullptr"
                   << "\n";
       } else {
-        gmf->operator()(values.discrete())->print(ss.str(), keyFormatter);
-        std::cout << "error = " << gmf->error(values) << std::endl;
+        hgf->operator()(values.discrete())->print(ss.str(), keyFormatter);
+        std::cout << "error = " << factor->error(values) << std::endl;
       }
     } else if (auto hc = std::dynamic_pointer_cast<HybridConditional>(factor)) {
       if (factor == nullptr) {
@@ -348,7 +348,7 @@ static std::shared_ptr<Factor> createHybridGaussianFactor(
     const KeyVector &continuousSeparator,
     const DiscreteKeys &discreteSeparator) {
   // Correct for the normalization constant used up by the conditional
-  auto correct = [&](const Result &pair) -> GaussianFactor::shared_ptr {
+  auto correct = [&](const Result &pair) -> GaussianFactorValuePair {
     const auto &[conditional, factor] = pair;
     if (factor) {
       auto hf = std::dynamic_pointer_cast<HessianFactor>(factor);
@@ -357,10 +357,10 @@ static std::shared_ptr<Factor> createHybridGaussianFactor(
       // as per the Hessian definition
       hf->constantTerm() += 2.0 * conditional->logNormalizationConstant();
     }
-    return factor;
+    return {factor, 0.0};
   };
-  DecisionTree<Key, GaussianFactor::shared_ptr> newFactors(eliminationResults,
-                                                           correct);
+  DecisionTree<Key, GaussianFactorValuePair> newFactors(eliminationResults,
+                                                        correct);
 
   return std::make_shared<HybridGaussianFactor>(continuousSeparator,
                                                 discreteSeparator, newFactors);
@@ -597,10 +597,10 @@ GaussianFactorGraph HybridGaussianFactorGraph::operator()(
       gfg.push_back(gf);
     } else if (auto gc = std::dynamic_pointer_cast<GaussianConditional>(f)) {
       gfg.push_back(gf);
-    } else if (auto gmf = std::dynamic_pointer_cast<HybridGaussianFactor>(f)) {
-      gfg.push_back((*gmf)(assignment));
-    } else if (auto gm = dynamic_pointer_cast<HybridGaussianConditional>(f)) {
-      gfg.push_back((*gm)(assignment));
+    } else if (auto hgf = std::dynamic_pointer_cast<HybridGaussianFactor>(f)) {
+      gfg.push_back((*hgf)(assignment));
+    } else if (auto hgc = dynamic_pointer_cast<HybridGaussianConditional>(f)) {
+      gfg.push_back((*hgc)(assignment));
     } else {
       continue;
     }
@@ -33,6 +33,9 @@
 
 namespace gtsam {
 
+/// Alias for a NonlinearFactor shared pointer and double scalar pair.
+using NonlinearFactorValuePair = std::pair<NonlinearFactor::shared_ptr, double>;
+
 /**
  * @brief Implementation of a discrete conditional mixture factor.
 *
@@ -53,9 +56,9 @@ class HybridNonlinearFactor : public HybridFactor {
 
   /**
    * @brief typedef for DecisionTree which has Keys as node labels and
-   * NonlinearFactor as leaf nodes.
+   * pairs of NonlinearFactor & an arbitrary scalar as leaf nodes.
    */
-  using Factors = DecisionTree<Key, sharedFactor>;
+  using Factors = DecisionTree<Key, NonlinearFactorValuePair>;
 
  private:
   /// Decision tree of Gaussian factors indexed by discrete keys.
@@ -89,32 +92,34 @@ class HybridNonlinearFactor : public HybridFactor {
    * @tparam FACTOR The type of the factor shared pointers being passed in.
    * Will be typecast to NonlinearFactor shared pointers.
    * @param keys Vector of keys for continuous factors.
-   * @param discreteKeys Vector of discrete keys.
-   * @param factors Vector of nonlinear factors.
+   * @param discreteKey The discrete key indexing each component factor.
+   * @param factors Vector of nonlinear factor and scalar pairs.
+   * Same size as the cardinality of discreteKey.
    * @param normalized Flag indicating if the factor error is already
    * normalized.
    */
   template <typename FACTOR>
-  HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys,
-                        const std::vector<std::shared_ptr<FACTOR>>& factors,
-                        bool normalized = false)
-      : Base(keys, discreteKeys), normalized_(normalized) {
-    std::vector<NonlinearFactor::shared_ptr> nonlinear_factors;
+  HybridNonlinearFactor(
+      const KeyVector& keys, const DiscreteKey& discreteKey,
+      const std::vector<std::pair<std::shared_ptr<FACTOR>, double>>& factors,
+      bool normalized = false)
+      : Base(keys, {discreteKey}), normalized_(normalized) {
+    std::vector<NonlinearFactorValuePair> nonlinear_factors;
     KeySet continuous_keys_set(keys.begin(), keys.end());
     KeySet factor_keys_set;
-    for (auto&& f : factors) {
+    for (auto&& [f, val] : factors) {
       // Insert all factor continuous keys in the continuous keys set.
       std::copy(f->keys().begin(), f->keys().end(),
                 std::inserter(factor_keys_set, factor_keys_set.end()));
 
       if (auto nf = std::dynamic_pointer_cast<NonlinearFactor>(f)) {
-        nonlinear_factors.push_back(nf);
+        nonlinear_factors.emplace_back(nf, val);
       } else {
         throw std::runtime_error(
             "Factors passed into HybridNonlinearFactor need to be nonlinear!");
       }
     }
-    factors_ = Factors(discreteKeys, nonlinear_factors);
+    factors_ = Factors({discreteKey}, nonlinear_factors);
 
     if (continuous_keys_set != factor_keys_set) {
       throw std::runtime_error(
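Editor's note: a usage sketch for the reworked templated constructor — not part of the diff; BetweenFactor<double> and the zero scalars are illustrative, matching the test updates below:

    #include <gtsam/hybrid/HybridNonlinearFactor.h>
    #include <gtsam/inference/Symbol.h>
    #include <gtsam/linear/NoiseModel.h>
    #include <gtsam/slam/BetweenFactor.h>

    using namespace gtsam;
    using symbol_shorthand::M;
    using symbol_shorthand::X;

    HybridNonlinearFactor makeMotionMixture() {
      auto noise = noiseModel::Isotropic::Sigma(1, 1.0);
      auto still = std::make_shared<BetweenFactor<double>>(X(0), X(1), 0.0, noise);
      auto moving = std::make_shared<BetweenFactor<double>>(X(0), X(1), 1.0, noise);
      // Each component now carries a scalar; 0.0 keeps the previous behavior.
      std::vector<std::pair<std::shared_ptr<BetweenFactor<double>>, double>>
          components{{still, 0.0}, {moving, 0.0}};
      return HybridNonlinearFactor(KeyVector{X(0), X(1)}, DiscreteKey(M(0), 2),
                                   components);
    }
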
@@ -133,9 +138,11 @@ class HybridNonlinearFactor : public HybridFactor {
    */
   AlgebraicDecisionTree<Key> errorTree(const Values& continuousValues) const {
     // functor to convert from sharedFactor to double error value.
-    auto errorFunc = [continuousValues](const sharedFactor& factor) {
-      return factor->error(continuousValues);
-    };
+    auto errorFunc =
+        [continuousValues](const std::pair<sharedFactor, double>& f) {
+          auto [factor, val] = f;
+          return factor->error(continuousValues) + (0.5 * val);
+        };
     DecisionTree<Key, double> result(factors_, errorFunc);
     return result;
   }
@@ -150,12 +157,10 @@ class HybridNonlinearFactor : public HybridFactor {
   double error(const Values& continuousValues,
                const DiscreteValues& discreteValues) const {
     // Retrieve the factor corresponding to the assignment in discreteValues.
-    auto factor = factors_(discreteValues);
+    auto [factor, val] = factors_(discreteValues);
     // Compute the error for the selected factor
     const double factorError = factor->error(continuousValues);
-    if (normalized_) return factorError;
-    return factorError + this->nonlinearFactorLogNormalizingConstant(
-                             factor, continuousValues);
+    return factorError + (0.5 * val);
   }
 
   /**
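Editor's note: the 0.5 * val term treats the stored scalar as a squared-distance-style constant, consistent with the Gaussian side of this PR. A tiny illustration with hypothetical numbers:

    // error(continuousValues, discreteValues) = factorError + 0.5 * val
    double factorError = 1.3;  // from factor->error(continuousValues)
    double val = 2.0;          // scalar stored with the selected component
    double hybridError = factorError + 0.5 * val;  // == 2.3
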
@@ -175,7 +180,7 @@ class HybridNonlinearFactor : public HybridFactor {
    */
   size_t dim() const {
     const auto assignments = DiscreteValues::CartesianProduct(discreteKeys_);
-    auto factor = factors_(assignments.at(0));
+    auto [factor, val] = factors_(assignments.at(0));
     return factor->dim();
   }
 
@@ -189,9 +194,11 @@ class HybridNonlinearFactor : public HybridFactor {
     std::cout << (s.empty() ? "" : s + " ");
     Base::print("", keyFormatter);
     std::cout << "\nHybridNonlinearFactor\n";
-    auto valueFormatter = [](const sharedFactor& v) {
-      if (v) {
-        return "Nonlinear factor on " + std::to_string(v->size()) + " keys";
+    auto valueFormatter = [](const std::pair<sharedFactor, double>& v) {
+      auto [factor, val] = v;
+      if (factor) {
+        return "Nonlinear factor on " + std::to_string(factor->size()) +
+               " keys";
       } else {
         return std::string("nullptr");
       }
@@ -211,8 +218,10 @@ class HybridNonlinearFactor : public HybridFactor {
         static_cast<const HybridNonlinearFactor&>(other));
 
     // Ensure that this HybridNonlinearFactor and `f` have the same `factors_`.
-    auto compare = [tol](const sharedFactor& a, const sharedFactor& b) {
-      return traits<NonlinearFactor>::Equals(*a, *b, tol);
+    auto compare = [tol](const std::pair<sharedFactor, double>& a,
+                         const std::pair<sharedFactor, double>& b) {
+      return traits<NonlinearFactor>::Equals(*a.first, *b.first, tol) &&
+             (a.second == b.second);
     };
     if (!factors_.equals(f.factors_, compare)) return false;
 
@@ -230,7 +239,7 @@ class HybridNonlinearFactor : public HybridFactor {
   GaussianFactor::shared_ptr linearize(
       const Values& continuousValues,
       const DiscreteValues& discreteValues) const {
-    auto factor = factors_(discreteValues);
+    auto factor = factors_(discreteValues).first;
     return factor->linearize(continuousValues);
   }
 
@@ -238,12 +247,15 @@ class HybridNonlinearFactor : public HybridFactor {
   std::shared_ptr<HybridGaussianFactor> linearize(
       const Values& continuousValues) const {
     // functional to linearize each factor in the decision tree
-    auto linearizeDT = [continuousValues](const sharedFactor& factor) {
-      return factor->linearize(continuousValues);
+    auto linearizeDT =
+        [continuousValues](const std::pair<sharedFactor, double>& f)
+        -> GaussianFactorValuePair {
+      auto [factor, val] = f;
+      return {factor->linearize(continuousValues), val};
     };
 
-    DecisionTree<Key, GaussianFactor::shared_ptr> linearized_factors(
-        factors_, linearizeDT);
+    DecisionTree<Key, std::pair<GaussianFactor::shared_ptr, double>>
+        linearized_factors(factors_, linearizeDT);
 
     return std::make_shared<HybridGaussianFactor>(
         continuousKeys_, discreteKeys_, linearized_factors);
@@ -76,8 +76,9 @@ virtual class HybridConditional {
 class HybridGaussianFactor : gtsam::HybridFactor {
   HybridGaussianFactor(
       const gtsam::KeyVector& continuousKeys,
-      const gtsam::DiscreteKeys& discreteKeys,
-      const std::vector<gtsam::GaussianFactor::shared_ptr>& factorsList);
+      const gtsam::DiscreteKey& discreteKey,
+      const std::vector<std::pair<gtsam::GaussianFactor::shared_ptr, double>>&
+          factorsList);
 
   void print(string s = "HybridGaussianFactor\n",
              const gtsam::KeyFormatter& keyFormatter =
@@ -90,8 +91,12 @@ class HybridGaussianConditional : gtsam::HybridFactor {
       const gtsam::KeyVector& continuousFrontals,
       const gtsam::KeyVector& continuousParents,
       const gtsam::DiscreteKeys& discreteParents,
-      const std::vector<gtsam::GaussianConditional::shared_ptr>&
-          conditionalsList);
+      const gtsam::HybridGaussianConditional::Conditionals& conditionals);
+  HybridGaussianConditional(
+      const gtsam::KeyVector& continuousFrontals,
+      const gtsam::KeyVector& continuousParents,
+      const gtsam::DiscreteKey& discreteParent,
+      const std::vector<gtsam::GaussianConditional::shared_ptr>& conditionals);
 
   gtsam::HybridGaussianFactor* likelihood(
       const gtsam::VectorValues& frontals) const;
@@ -242,14 +247,14 @@ class HybridNonlinearFactorGraph {
 class HybridNonlinearFactor : gtsam::HybridFactor {
   HybridNonlinearFactor(
       const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys,
-      const gtsam::DecisionTree<gtsam::Key, gtsam::NonlinearFactor*>& factors,
+      const gtsam::DecisionTree<
+          gtsam::Key, std::pair<gtsam::NonlinearFactor*, double>>& factors,
      bool normalized = false);
 
-  template <FACTOR = {gtsam::NonlinearFactor}>
-  HybridNonlinearFactor(const gtsam::KeyVector& keys,
-                        const gtsam::DiscreteKeys& discreteKeys,
-                        const std::vector<FACTOR*>& factors,
-                        bool normalized = false);
+  HybridNonlinearFactor(
+      const gtsam::KeyVector& keys, const gtsam::DiscreteKey& discreteKey,
+      const std::vector<std::pair<gtsam::NonlinearFactor*, double>>& factors,
+      bool normalized = false);
 
   double error(const gtsam::Values& continuousValues,
                const gtsam::DiscreteValues& discreteValues) const;
@@ -57,12 +57,16 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain(
 
   // keyFunc(1) to keyFunc(n+1)
   for (size_t t = 1; t < n; t++) {
-    hfg.add(HybridGaussianFactor(
-        {keyFunc(t), keyFunc(t + 1)}, {{dKeyFunc(t), 2}},
-        {std::make_shared<JacobianFactor>(keyFunc(t), I_3x3, keyFunc(t + 1),
-                                          I_3x3, Z_3x1),
-         std::make_shared<JacobianFactor>(keyFunc(t), I_3x3, keyFunc(t + 1),
-                                          I_3x3, Vector3::Ones())}));
+    DiscreteKeys dKeys{{dKeyFunc(t), 2}};
+    HybridGaussianFactor::FactorValuePairs components(
+        dKeys, {{std::make_shared<JacobianFactor>(keyFunc(t), I_3x3,
+                                                  keyFunc(t + 1), I_3x3, Z_3x1),
+                 0.0},
+                {std::make_shared<JacobianFactor>(
+                     keyFunc(t), I_3x3, keyFunc(t + 1), I_3x3, Vector3::Ones()),
+                 0.0}});
+    hfg.add(
+        HybridGaussianFactor({keyFunc(t), keyFunc(t + 1)}, dKeys, components));
 
     if (t > 1) {
       hfg.add(DecisionTreeFactor({{dKeyFunc(t - 1), 2}, {dKeyFunc(t), 2}},
@@ -159,12 +163,13 @@ struct Switching {
     for (size_t k = 0; k < K - 1; k++) {
       KeyVector keys = {X(k), X(k + 1)};
       auto motion_models = motionModels(k, between_sigma);
-      std::vector<NonlinearFactor::shared_ptr> components;
+      std::vector<NonlinearFactorValuePair> components;
       for (auto &&f : motion_models) {
-        components.push_back(std::dynamic_pointer_cast<NonlinearFactor>(f));
+        components.push_back(
+            {std::dynamic_pointer_cast<NonlinearFactor>(f), 0.0});
       }
-      nonlinearFactorGraph.emplace_shared<HybridNonlinearFactor>(
-          keys, DiscreteKeys{modes[k]}, components);
+      nonlinearFactorGraph.emplace_shared<HybridNonlinearFactor>(keys, modes[k],
+                                                                 components);
     }
 
     // Add measurement factors
@@ -43,12 +43,11 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
   // Create Gaussian mixture z_i = x0 + noise for each measurement.
   for (size_t i = 0; i < num_measurements; i++) {
     const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode;
+    std::vector<GaussianConditional::shared_ptr> conditionals{
+        GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 0.5),
+        GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 3)};
     bayesNet.emplace_shared<HybridGaussianConditional>(
-        KeyVector{Z(i)}, KeyVector{X(0)}, DiscreteKeys{mode_i},
-        std::vector{GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0),
-                                                             Z_1x1, 0.5),
-                    GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0),
-                                                             Z_1x1, 3)});
+        KeyVector{Z(i)}, KeyVector{X(0)}, mode_i, conditionals);
   }
 
   // Create prior on X(0).
@@ -108,7 +108,7 @@ TEST(HybridBayesNet, evaluateHybrid) {
   HybridBayesNet bayesNet;
   bayesNet.push_back(continuousConditional);
   bayesNet.emplace_shared<HybridGaussianConditional>(
-      KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
+      KeyVector{X(1)}, KeyVector{}, Asia,
       std::vector{conditional0, conditional1});
   bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
 
@@ -169,7 +169,7 @@ TEST(HybridBayesNet, Error) {
       X(1), Vector1::Constant(2), I_1x1, model1);
 
   auto gm = std::make_shared<HybridGaussianConditional>(
-      KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
+      KeyVector{X(1)}, KeyVector{}, Asia,
       std::vector{conditional0, conditional1});
   // Create hybrid Bayes net.
   HybridBayesNet bayesNet;
@@ -383,14 +383,16 @@ TEST(HybridBayesNet, Sampling) {
   HybridNonlinearFactorGraph nfg;
 
   auto noise_model = noiseModel::Diagonal::Sigmas(Vector1(1.0));
-  nfg.emplace_shared<PriorFactor<double>>(X(0), 0.0, noise_model);
-
   auto zero_motion =
       std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
   auto one_motion =
       std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
-  std::vector<NonlinearFactor::shared_ptr> factors = {zero_motion, one_motion};
+
+  nfg.emplace_shared<PriorFactor<double>>(X(0), 0.0, noise_model);
   nfg.emplace_shared<HybridNonlinearFactor>(
-      KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, factors);
+      KeyVector{X(0), X(1)}, DiscreteKey(M(0), 2),
+      std::vector<NonlinearFactorValuePair>{{zero_motion, 0.0},
+                                            {one_motion, 0.0}});
 
   DiscreteKey mode(M(0), 2);
   nfg.emplace_shared<DiscreteDistribution>(mode, "1/1");
@@ -435,9 +435,10 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() {
       std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
   const auto one_motion =
       std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
-  nfg.emplace_shared<HybridNonlinearFactor>(
-      KeyVector{X(0), X(1)}, DiscreteKeys{m},
-      std::vector<NonlinearFactor::shared_ptr>{zero_motion, one_motion});
+  std::vector<NonlinearFactorValuePair> components = {{zero_motion, 0.0},
+                                                      {one_motion, 0.0}};
+  nfg.emplace_shared<HybridNonlinearFactor>(KeyVector{X(0), X(1)}, m,
+                                            components);
 
   return nfg;
 }
@@ -560,8 +561,13 @@ std::shared_ptr<HybridGaussianFactor> mixedVarianceFactor(
     }
   };
   auto updated_components = gmf->factors().apply(func);
+  auto updated_pairs = HybridGaussianFactor::FactorValuePairs(
+      updated_components,
+      [](const GaussianFactor::shared_ptr& gf) -> GaussianFactorValuePair {
+        return {gf, 0.0};
+      });
   auto updated_gmf = std::make_shared<HybridGaussianFactor>(
-      gmf->continuousKeys(), gmf->discreteKeys(), updated_components);
+      gmf->continuousKeys(), gmf->discreteKeys(), updated_pairs);
 
   return updated_gmf;
 }
@@ -577,9 +583,6 @@ TEST(HybridEstimation, ModeSelection) {
   graph.emplace_shared<PriorFactor<double>>(X(0), 0.0, measurement_model);
   graph.emplace_shared<PriorFactor<double>>(X(1), 0.0, measurement_model);
 
-  DiscreteKeys modes;
-  modes.emplace_back(M(0), 2);
-
   // The size of the noise model
   size_t d = 1;
   double noise_tight = 0.5, noise_loose = 5.0;
@@ -588,10 +591,11 @@ TEST(HybridEstimation, ModeSelection) {
            X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_loose)),
        model1 = std::make_shared<MotionModel>(
            X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight));
 
-  std::vector<NonlinearFactor::shared_ptr> components = {model0, model1};
+  std::vector<NonlinearFactorValuePair> components = {{model0, 0.0},
+                                                      {model1, 0.0}};
 
   KeyVector keys = {X(0), X(1)};
+  DiscreteKey modes(M(0), 2);
   HybridNonlinearFactor mf(keys, modes, components);
 
   initial.insert(X(0), 0.0);
@@ -610,18 +614,22 @@ TEST(HybridEstimation, ModeSelection) {
 
   /**************************************************************/
   HybridBayesNet bn;
-  const DiscreteKey mode{M(0), 2};
+  const DiscreteKey mode(M(0), 2);
 
   bn.push_back(
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1));
   bn.push_back(
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));
 
+  std::vector<GaussianConditional::shared_ptr> conditionals{
+      GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
+                                               Z_1x1, noise_loose),
+      GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
+                                               Z_1x1, noise_tight)};
   bn.emplace_shared<HybridGaussianConditional>(
       KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
-      std::vector{GaussianConditional::sharedMeanAndStddev(
-                      Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_loose),
-                  GaussianConditional::sharedMeanAndStddev(
-                      Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_tight)});
+      HybridGaussianConditional::Conditionals(DiscreteKeys{mode},
+                                              conditionals));
 
   VectorValues vv;
   vv.insert(Z(0), Z_1x1);
@@ -641,18 +649,22 @@ TEST(HybridEstimation, ModeSelection2) {
   double noise_tight = 0.5, noise_loose = 5.0;
 
   HybridBayesNet bn;
-  const DiscreteKey mode{M(0), 2};
+  const DiscreteKey mode(M(0), 2);
 
   bn.push_back(
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1));
   bn.push_back(
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1));
 
+  std::vector<GaussianConditional::shared_ptr> conditionals{
+      GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
+                                               Z_3x1, noise_loose),
+      GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
+                                               Z_3x1, noise_tight)};
   bn.emplace_shared<HybridGaussianConditional>(
       KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
-      std::vector{GaussianConditional::sharedMeanAndStddev(
-                      Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_loose),
-                  GaussianConditional::sharedMeanAndStddev(
-                      Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_tight)});
+      HybridGaussianConditional::Conditionals(DiscreteKeys{mode},
+                                              conditionals));
 
   VectorValues vv;
   vv.insert(Z(0), Z_3x1);
@@ -672,17 +684,15 @@ TEST(HybridEstimation, ModeSelection2) {
   graph.emplace_shared<PriorFactor<Vector3>>(X(0), Z_3x1, measurement_model);
   graph.emplace_shared<PriorFactor<Vector3>>(X(1), Z_3x1, measurement_model);
 
-  DiscreteKeys modes;
-  modes.emplace_back(M(0), 2);
-
   auto model0 = std::make_shared<BetweenFactor<Vector3>>(
            X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_loose)),
        model1 = std::make_shared<BetweenFactor<Vector3>>(
            X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight));
 
-  std::vector<NonlinearFactor::shared_ptr> components = {model0, model1};
+  std::vector<NonlinearFactorValuePair> components = {{model0, 0.0},
+                                                      {model1, 0.0}};
 
   KeyVector keys = {X(0), X(1)};
+  DiscreteKey modes(M(0), 2);
   HybridNonlinearFactor mf(keys, modes, components);
 
   initial.insert<Vector3>(X(0), Z_3x1);
@@ -52,9 +52,9 @@ TEST(HybridFactorGraph, Keys) {
 
   // Add a gaussian mixture factor ϕ(x1, c1)
   DiscreteKey m1(M(1), 2);
-  DecisionTree<Key, GaussianFactor::shared_ptr> dt(
-      M(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
-      std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
+  DecisionTree<Key, GaussianFactorValuePair> dt(
+      M(1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
+      {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0});
   hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt));
 
   KeySet expected_continuous{X(0), X(1)};
@@ -52,7 +52,9 @@ const std::vector<GaussianConditional::shared_ptr> conditionals{
                                              commonSigma),
     GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
                                              commonSigma)};
-const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals);
+const HybridGaussianConditional mixture(
+    {Z(0)}, {X(0)}, {mode},
+    HybridGaussianConditional::Conditionals({mode}, conditionals));
 }  // namespace equal_constants
 
 /* ************************************************************************* */
@@ -153,7 +155,9 @@ const std::vector<GaussianConditional::shared_ptr> conditionals{
                                              0.5),
     GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
                                              3.0)};
-const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals);
+const HybridGaussianConditional mixture(
+    {Z(0)}, {X(0)}, {mode},
+    HybridGaussianConditional::Conditionals({mode}, conditionals));
 }  // namespace mode_dependent_constants
 
 /* ************************************************************************* */
@@ -71,8 +71,9 @@ TEST(HybridGaussianFactor, Sum) {
   auto f20 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
   auto f21 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
   auto f22 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
-  std::vector<GaussianFactor::shared_ptr> factorsA{f10, f11};
-  std::vector<GaussianFactor::shared_ptr> factorsB{f20, f21, f22};
+  std::vector<GaussianFactorValuePair> factorsA{{f10, 0.0}, {f11, 0.0}};
+  std::vector<GaussianFactorValuePair> factorsB{
+      {f20, 0.0}, {f21, 0.0}, {f22, 0.0}};
 
   // TODO(Frank): why specify keys at all? And: keys in factor should be *all*
   // keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey?
@@ -109,7 +110,7 @@ TEST(HybridGaussianFactor, Printing) {
   auto b = Matrix::Zero(2, 1);
   auto f10 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
   auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
-  std::vector<GaussianFactor::shared_ptr> factors{f10, f11};
+  std::vector<GaussianFactorValuePair> factors{{f10, 0.0}, {f11, 0.0}};
 
   HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
 
@@ -178,7 +179,7 @@ TEST(HybridGaussianFactor, Error) {
 
   auto f0 = std::make_shared<JacobianFactor>(X(1), A01, X(2), A02, b);
   auto f1 = std::make_shared<JacobianFactor>(X(1), A11, X(2), A12, b);
-  std::vector<GaussianFactor::shared_ptr> factors{f0, f1};
+  std::vector<GaussianFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};
 
   HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
 
@@ -232,8 +233,11 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1,
       c1 = make_shared<GaussianConditional>(z, Vector1(mu1), I_1x1, model1);
 
   HybridBayesNet hbn;
+  DiscreteKeys discreteParents{m};
   hbn.emplace_shared<HybridGaussianConditional>(
-      KeyVector{z}, KeyVector{}, DiscreteKeys{m}, std::vector{c0, c1});
+      KeyVector{z}, KeyVector{}, discreteParents,
+      HybridGaussianConditional::Conditionals(discreteParents,
+                                              std::vector{c0, c1}));
 
   auto mixing = make_shared<DiscreteConditional>(m, "50/50");
   hbn.push_back(mixing);
@@ -407,8 +411,11 @@ static HybridGaussianConditional::shared_ptr CreateHybridMotionModel(
                                             -I_1x1, model0),
        c1 = make_shared<GaussianConditional>(X(1), Vector1(mu1), I_1x1, X(0),
                                              -I_1x1, model1);
+  DiscreteKeys discreteParents{m1};
   return std::make_shared<HybridGaussianConditional>(
-      KeyVector{X(1)}, KeyVector{X(0)}, DiscreteKeys{m1}, std::vector{c0, c1});
+      KeyVector{X(1)}, KeyVector{X(0)}, discreteParents,
+      HybridGaussianConditional::Conditionals(discreteParents,
+                                              std::vector{c0, c1}));
 }
 
 /// Create two state Bayes network with 1 or two measurement models
@@ -523,7 +530,7 @@ TEST(HybridGaussianFactor, TwoStateModel) {
 /**
  * Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1).
 *
- * P(f01|x1,x0,m1) has different means and different covariances.
+ * P(x1|x0,m1) has different means and different covariances.
 *
 * Converting to a factor graph gives us
 * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1)
@@ -613,13 +620,107 @@ TEST(HybridGaussianFactor, TwoStateModel2) {
   }
 }
 
+/* ************************************************************************* */
+/**
+ * Test a model p(z0|x0)p(x1|x0,m1)p(z1|x1)p(m1).
+ *
+ * p(x1|x0,m1) has the same means but different covariances.
+ *
+ * Converting to a factor graph gives us
+ * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)p(m1)
+ *
+ * If we only have a measurement on z0, then
+ * the p(m1) should be 0.5/0.5.
+ * Getting a measurement on z1 gives us more information.
+ */
+TEST(HybridGaussianFactor, TwoStateModel3) {
+  using namespace test_two_state_estimation;
+
+  double mu = 1.0;
+  double sigma0 = 0.5, sigma1 = 2.0;
+  auto hybridMotionModel = CreateHybridMotionModel(mu, mu, sigma0, sigma1);
+
+  // Start with no measurement on x1, only on x0
+  const Vector1 z0(0.5);
+  VectorValues given;
+  given.insert(Z(0), z0);
+
+  {
+    HybridBayesNet hbn = CreateBayesNet(hybridMotionModel);
+    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
+
+    // Check that ratio of Bayes net and factor graph for different modes is
+    // equal for several values of {x0,x1}.
+    for (VectorValues vv :
+         {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
+          VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
+      vv.insert(given);  // add measurements for HBN
+      HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
+      EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
+                           gfg.error(hv1) / hbn.error(hv1), 1e-9);
+    }
+
+    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
+
+    // Importance sampling run with 100k samples gives 50.095/49.905
+    // approximateDiscreteMarginal(hbn, hybridMotionModel, given);
+
+    // Since there is no measurement on x1, we get a 50/50 probability
+    auto p_m = bn->at(2)->asDiscrete();
+    EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 0}}), 1e-9);
+    EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 1}}), 1e-9);
+  }
+
+  {
+    // Now we add a measurement z1 on x1
+    const Vector1 z1(4.0);  // favors m==1
+    given.insert(Z(1), z1);
+
+    HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
+    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
+
+    // Check that ratio of Bayes net and factor graph for different modes is
+    // equal for several values of {x0,x1}.
+    for (VectorValues vv :
+         {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
+          VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
+      vv.insert(given);  // add measurements for HBN
+      HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
+      EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
+                           gfg.error(hv1) / hbn.error(hv1), 1e-9);
+    }
+
+    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
+
+    // Values taken from an importance sampling run with 100k samples:
+    // approximateDiscreteMarginal(hbn, hybridMotionModel, given);
+    DiscreteConditional expected(m1, "51.7762/48.2238");
+    EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
+  }
+
+  {
+    // Add a different measurement z1 on x1 that favors m==1
+    const Vector1 z1(7.0);
+    given.insert_or_assign(Z(1), z1);
+
+    HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
+    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
+    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
+
+    // Values taken from an importance sampling run with 100k samples:
+    // approximateDiscreteMarginal(hbn, hybridMotionModel, given);
+    DiscreteConditional expected(m1, "49.0762/50.9238");
+    EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.005));
+  }
+}
+
 /* ************************************************************************* */
 /**
  * Same model, P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1), but now with very informative
 * measurements and vastly different motion model: either stand still or move
 * far. This yields a very informative posterior.
 */
-TEST(HybridGaussianFactor, TwoStateModel3) {
+TEST(HybridGaussianFactor, TwoStateModel4) {
   using namespace test_two_state_estimation;
 
   double mu0 = 0.0, mu1 = 10.0;
@@ -127,9 +127,9 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
 
   // Add a gaussian mixture factor ϕ(x1, c1)
   DiscreteKey m1(M(1), 2);
-  DecisionTree<Key, GaussianFactor::shared_ptr> dt(
-      M(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
-      std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
+  DecisionTree<Key, GaussianFactorValuePair> dt(
+      M(1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
+      {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0});
   hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt));
 
   auto result = hfg.eliminateSequential();
@@ -153,9 +153,9 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
   // Add factor between x0 and x1
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
 
-  std::vector<GaussianFactor::shared_ptr> factors = {
-      std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
-      std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())};
+  std::vector<GaussianFactorValuePair> factors = {
+      {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
+      {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0}};
   hfg.add(HybridGaussianFactor({X(1)}, {m1}, factors));
 
   // Discrete probability table for c1
@@ -178,10 +178,10 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
 
-  hfg.add(HybridGaussianFactor(
-      {X(1)}, {{M(1), 2}},
-      {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
-       std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())}));
+  std::vector<GaussianFactorValuePair> factors = {
+      {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
+      {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0}};
+  hfg.add(HybridGaussianFactor({X(1)}, {M(1), 2}, factors));
 
   hfg.add(DecisionTreeFactor(m1, {2, 8}));
   // TODO(Varun) Adding extra discrete variable not connected to continuous
@@ -208,9 +208,9 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) {
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
 
   // Decision tree with different modes on x1
-  DecisionTree<Key, GaussianFactor::shared_ptr> dt(
-      M(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
-      std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
+  DecisionTree<Key, GaussianFactorValuePair> dt(
+      M(1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
+      {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0});
 
   // Hybrid factor P(x1|c1)
   hfg.add(HybridGaussianFactor({X(1)}, {m}, dt));
@@ -238,14 +238,14 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
   hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1));
 
   {
-    hfg.add(HybridGaussianFactor(
-        {X(0)}, {{M(0), 2}},
-        {std::make_shared<JacobianFactor>(X(0), I_3x3, Z_3x1),
-         std::make_shared<JacobianFactor>(X(0), I_3x3, Vector3::Ones())}));
+    std::vector<GaussianFactorValuePair> factors = {
+        {std::make_shared<JacobianFactor>(X(0), I_3x3, Z_3x1), 0.0},
+        {std::make_shared<JacobianFactor>(X(0), I_3x3, Vector3::Ones()), 0.0}};
+    hfg.add(HybridGaussianFactor({X(0)}, {M(0), 2}, factors));
 
-    DecisionTree<Key, GaussianFactor::shared_ptr> dt1(
-        M(1), std::make_shared<JacobianFactor>(X(2), I_3x3, Z_3x1),
-        std::make_shared<JacobianFactor>(X(2), I_3x3, Vector3::Ones()));
+    DecisionTree<Key, GaussianFactorValuePair> dt1(
+        M(1), {std::make_shared<JacobianFactor>(X(2), I_3x3, Z_3x1), 0.0},
+        {std::make_shared<JacobianFactor>(X(2), I_3x3, Vector3::Ones()), 0.0});
 
     hfg.add(HybridGaussianFactor({X(2)}, {{M(1), 2}}, dt1));
   }
@@ -256,15 +256,15 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
   hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1));
 
   {
-    DecisionTree<Key, GaussianFactor::shared_ptr> dt(
-        M(3), std::make_shared<JacobianFactor>(X(3), I_3x3, Z_3x1),
-        std::make_shared<JacobianFactor>(X(3), I_3x3, Vector3::Ones()));
+    DecisionTree<Key, GaussianFactorValuePair> dt(
+        M(3), {std::make_shared<JacobianFactor>(X(3), I_3x3, Z_3x1), 0.0},
+        {std::make_shared<JacobianFactor>(X(3), I_3x3, Vector3::Ones()), 0.0});
 
     hfg.add(HybridGaussianFactor({X(3)}, {{M(3), 2}}, dt));
 
-    DecisionTree<Key, GaussianFactor::shared_ptr> dt1(
-        M(2), std::make_shared<JacobianFactor>(X(5), I_3x3, Z_3x1),
-        std::make_shared<JacobianFactor>(X(5), I_3x3, Vector3::Ones()));
+    DecisionTree<Key, GaussianFactorValuePair> dt1(
+        M(2), {std::make_shared<JacobianFactor>(X(5), I_3x3, Z_3x1), 0.0},
+        {std::make_shared<JacobianFactor>(X(5), I_3x3, Vector3::Ones()), 0.0});
 
     hfg.add(HybridGaussianFactor({X(5)}, {{M(2), 2}}, dt1));
   }
@@ -552,9 +552,9 @@ TEST(HybridGaussianFactorGraph, optimize) {
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
 
-  DecisionTree<Key, GaussianFactor::shared_ptr> dt(
-      C(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
-      std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
+  DecisionTree<Key, GaussianFactorValuePair> dt(
+      C(1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
+      {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0});
 
   hfg.add(HybridGaussianFactor({X(1)}, {c1}, dt));
 
@@ -682,8 +682,11 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
            x0, -I_1x1, model0),
        c1 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
                                              x0, -I_1x1, model1);
+  DiscreteKeys discreteParents{m1};
   hbn.emplace_shared<HybridGaussianConditional>(
-      KeyVector{f01}, KeyVector{x0, x1}, DiscreteKeys{m1}, std::vector{c0, c1});
+      KeyVector{f01}, KeyVector{x0, x1}, discreteParents,
+      HybridGaussianConditional::Conditionals(discreteParents,
+                                              std::vector{c0, c1}));
 
   // Discrete uniform prior.
   hbn.emplace_shared<DiscreteConditional>(m1, "0.5/0.5");
@@ -806,9 +809,11 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) {
                    X(0), Vector1(14.1421), I_1x1 * 2.82843),
        conditional1 = std::make_shared<GaussianConditional>(
                    X(0), Vector1(10.1379), I_1x1 * 2.02759);
+  DiscreteKeys discreteParents{mode};
   expectedBayesNet.emplace_shared<HybridGaussianConditional>(
-      KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
-      std::vector{conditional0, conditional1});
+      KeyVector{X(0)}, KeyVector{}, discreteParents,
+      HybridGaussianConditional::Conditionals(
+          discreteParents, std::vector{conditional0, conditional1}));
 
   // Add prior on mode.
   expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "74/26");
@@ -831,12 +836,13 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
   HybridBayesNet bn;
 
   // Create Gaussian mixture z_0 = x0 + noise for each measurement.
+  std::vector<GaussianConditional::shared_ptr> conditionals{
+      GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3),
+      GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 0.5)};
   auto gm = std::make_shared<HybridGaussianConditional>(
       KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode},
-      std::vector{
-          GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3),
-          GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1,
-                                                   0.5)});
+      HybridGaussianConditional::Conditionals(DiscreteKeys{mode},
+                                              conditionals));
   bn.push_back(gm);
 
   // Create prior on X(0).
@@ -865,7 +871,8 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
                    X(0), Vector1(14.1421), I_1x1 * 2.82843);
   expectedBayesNet.emplace_shared<HybridGaussianConditional>(
       KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
-      std::vector{conditional0, conditional1});
+      HybridGaussianConditional::Conditionals(
+          DiscreteKeys{mode}, std::vector{conditional0, conditional1}));
 
   // Add prior on mode.
   expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "1/1");
@@ -902,7 +909,8 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) {
                    X(0), Vector1(10.274), I_1x1 * 2.0548);
   expectedBayesNet.emplace_shared<HybridGaussianConditional>(
       KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
-      std::vector{conditional0, conditional1});
+      HybridGaussianConditional::Conditionals(
+          DiscreteKeys{mode}, std::vector{conditional0, conditional1}));
 
   // Add prior on mode.
   expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "23/77");
@@ -947,12 +955,14 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
   for (size_t t : {0, 1, 2}) {
     // Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t):
     const auto noise_mode_t = DiscreteKey{N(t), 2};
+    std::vector<GaussianConditional::shared_ptr> conditionals{
+        GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1, 0.5),
+        GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1,
+                                                 3.0)};
     bn.emplace_shared<HybridGaussianConditional>(
         KeyVector{Z(t)}, KeyVector{X(t)}, DiscreteKeys{noise_mode_t},
-        std::vector{GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t),
-                                                             Z_1x1, 0.5),
-                    GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t),
-                                                             Z_1x1, 3.0)});
+        HybridGaussianConditional::Conditionals(DiscreteKeys{noise_mode_t},
+                                                conditionals));
 
     // Create prior on discrete mode N(t):
     bn.emplace_shared<DiscreteConditional>(noise_mode_t, "20/80");
@@ -962,12 +972,15 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
   for (size_t t : {2, 1}) {
     // Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1):
     const auto motion_model_t = DiscreteKey{M(t), 2};
+    std::vector<GaussianConditional::shared_ptr> conditionals{
+        GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), Z_1x1,
+                                                 0.2),
+        GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), I_1x1,
+                                                 0.2)};
     auto gm = std::make_shared<HybridGaussianConditional>(
         KeyVector{X(t)}, KeyVector{X(t - 1)}, DiscreteKeys{motion_model_t},
-        std::vector{GaussianConditional::sharedMeanAndStddev(
-                        X(t), I_1x1, X(t - 1), Z_1x1, 0.2),
-                    GaussianConditional::sharedMeanAndStddev(
-                        X(t), I_1x1, X(t - 1), I_1x1, 0.2)});
+        HybridGaussianConditional::Conditionals(DiscreteKeys{motion_model_t},
+                                                conditionals));
     bn.push_back(gm);
 
     // Create prior on motion model M(t):
@@ -420,9 +420,10 @@ TEST(HybridGaussianISAM, NonTrivial) {
                                                 noise_model),
        moving = std::make_shared<PlanarMotionModel>(W(0), W(1), odometry,
                                                     noise_model);
-  std::vector<PlanarMotionModel::shared_ptr> components = {moving, still};
+  std::vector<std::pair<PlanarMotionModel::shared_ptr, double>> components = {
+      {moving, 0.0}, {still, 0.0}};
   auto mixtureFactor = std::make_shared<HybridNonlinearFactor>(
-      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
+      contKeys, gtsam::DiscreteKey(M(1), 2), components);
   fg.push_back(mixtureFactor);
 
   // Add equivalent of ImuFactor
@@ -460,9 +461,9 @@ TEST(HybridGaussianISAM, NonTrivial) {
                                                noise_model);
   moving =
       std::make_shared<PlanarMotionModel>(W(1), W(2), odometry, noise_model);
-  components = {moving, still};
+  components = {{moving, 0.0}, {still, 0.0}};
   mixtureFactor = std::make_shared<HybridNonlinearFactor>(
-      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components);
+      contKeys, gtsam::DiscreteKey(M(2), 2), components);
   fg.push_back(mixtureFactor);
 
   // Add equivalent of ImuFactor
@@ -503,9 +504,9 @@ TEST(HybridGaussianISAM, NonTrivial) {
                                                noise_model);
   moving =
       std::make_shared<PlanarMotionModel>(W(2), W(3), odometry, noise_model);
-  components = {moving, still};
+  components = {{moving, 0.0}, {still, 0.0}};
   mixtureFactor = std::make_shared<HybridNonlinearFactor>(
-      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components);
+      contKeys, gtsam::DiscreteKey(M(3), 2), components);
   fg.push_back(mixtureFactor);
 
   // Add equivalent of ImuFactor
@@ -58,7 +58,7 @@ TEST(HybridNonlinearFactor, Printing) {
       std::make_shared<BetweenFactor<double>>(X(1), X(2), between0, model);
   auto f1 =
       std::make_shared<BetweenFactor<double>>(X(1), X(2), between1, model);
-  std::vector<NonlinearFactor::shared_ptr> factors{f0, f1};
+  std::vector<NonlinearFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};
 
   HybridNonlinearFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
 
@@ -86,7 +86,7 @@ static HybridNonlinearFactor getHybridNonlinearFactor() {
       std::make_shared<BetweenFactor<double>>(X(1), X(2), between0, model);
   auto f1 =
       std::make_shared<BetweenFactor<double>>(X(1), X(2), between1, model);
-  std::vector<NonlinearFactor::shared_ptr> factors{f0, f1};
+  std::vector<NonlinearFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};
 
   return HybridNonlinearFactor({X(1), X(2)}, {m1}, factors);
 }
@@ -27,6 +27,7 @@
 #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
 #include <gtsam/linear/GaussianBayesNet.h>
 #include <gtsam/linear/GaussianFactorGraph.h>
+#include <gtsam/linear/NoiseModel.h>
 #include <gtsam/nonlinear/NonlinearFactorGraph.h>
 #include <gtsam/nonlinear/PriorFactor.h>
 #include <gtsam/sam/BearingRangeFactor.h>
@@ -131,9 +132,10 @@ TEST(HybridGaussianFactorGraph, Resize) {
   auto still = std::make_shared<MotionModel>(X(0), X(1), 0.0, noise_model),
        moving = std::make_shared<MotionModel>(X(0), X(1), 1.0, noise_model);
 
-  std::vector<MotionModel::shared_ptr> components = {still, moving};
+  std::vector<std::pair<MotionModel::shared_ptr, double>> components = {
+      {still, 0.0}, {moving, 0.0}};
   auto dcFactor = std::make_shared<HybridNonlinearFactor>(
-      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
+      contKeys, gtsam::DiscreteKey(M(1), 2), components);
   nhfg.push_back(dcFactor);
 
   Values linearizationPoint;
@ -162,17 +164,18 @@ TEST(HybridGaussianFactorGraph, HybridNonlinearFactor) {
  auto still = std::make_shared<MotionModel>(X(0), X(1), 0.0, noise_model),
       moving = std::make_shared<MotionModel>(X(0), X(1), 1.0, noise_model);

  std::vector<MotionModel::shared_ptr> components = {still, moving};
  std::vector<std::pair<MotionModel::shared_ptr, double>> components = {
      {still, 0.0}, {moving, 0.0}};

  // Check for an exception when the number of continuous keys is under-specified.
  KeyVector contKeys = {X(0)};
  THROWS_EXCEPTION(std::make_shared<HybridNonlinearFactor>(
      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components));
      contKeys, gtsam::DiscreteKey(M(1), 2), components));

  // Check for an exception when too many continuous keys are specified.
  contKeys = {X(0), X(1), X(2)};
  THROWS_EXCEPTION(std::make_shared<HybridNonlinearFactor>(
      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components));
      contKeys, gtsam::DiscreteKey(M(1), 2), components));
}

/*****************************************************************************
@ -440,7 +443,7 @@ TEST(HybridFactorGraph, Full_Elimination) {

  DiscreteFactorGraph discrete_fg;
  // TODO(Varun) Make this a function of HybridGaussianFactorGraph?
  for (auto& factor : (*remainingFactorGraph_partial)) {
  for (auto &factor : (*remainingFactorGraph_partial)) {
    auto df = dynamic_pointer_cast<DiscreteFactor>(factor);
    assert(df);
    discrete_fg.push_back(df);
@ -801,9 +804,10 @@ TEST(HybridFactorGraph, DefaultDecisionTree) {
                                                      noise_model),
       moving = std::make_shared<PlanarMotionModel>(X(0), X(1), odometry,
                                                    noise_model);
  std::vector<PlanarMotionModel::shared_ptr> motion_models = {still, moving};
  std::vector<std::pair<PlanarMotionModel::shared_ptr, double>> motion_models =
      {{still, 0.0}, {moving, 0.0}};
  fg.emplace_shared<HybridNonlinearFactor>(
      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, motion_models);
      contKeys, gtsam::DiscreteKey(M(1), 2), motion_models);

  // Add Range-Bearing measurements from X0 to L0 and X1 to L1.
  // create a noise model for the landmark measurements
@ -838,9 +842,174 @@ TEST(HybridFactorGraph, DefaultDecisionTree) {
  EXPECT_LONGS_EQUAL(1, remainingFactorGraph->size());
}

namespace test_relinearization {
/**
 * @brief Create a factor graph by directly specifying all
 * the factors instead of creating conditionals first.
 * This way we can directly provide the likelihoods and
 * then perform (re-)linearization.
 *
 * @param means The means of the mixture components.
 * @param sigmas The standard deviations of the mixture components.
 * @param m1 The discrete key.
 * @param x0_measurement A measurement on X0.
 * @return HybridNonlinearFactorGraph
 */
static HybridNonlinearFactorGraph CreateFactorGraph(
    const std::vector<double> &means, const std::vector<double> &sigmas,
    DiscreteKey &m1, double x0_measurement) {
  auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]);
  auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]);
  auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);

  auto f0 =
      std::make_shared<BetweenFactor<double>>(X(0), X(1), means[0], model0);
  auto f1 =
      std::make_shared<BetweenFactor<double>>(X(0), X(1), means[1], model1);

  // Create HybridNonlinearFactor
  std::vector<NonlinearFactorValuePair> factors{
      {f0, ComputeLogNormalizer(model0)}, {f1, ComputeLogNormalizer(model1)}};

  HybridNonlinearFactor mixtureFactor({X(0), X(1)}, {m1}, factors);

  HybridNonlinearFactorGraph hfg;
  hfg.push_back(mixtureFactor);

  hfg.push_back(PriorFactor<double>(X(0), x0_measurement, prior_noise));

  return hfg;
}
}  // namespace test_relinearization
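A note on the helper above: the value paired with each factor compensates for the normalization constant that a plain Gaussian factor error drops. Under the standard Gaussian model (an assumption made explicit here, not stated in the diff), the negative log-density of component m with whitened residual r_m is

  -\log p(x \mid m) \;=\; \tfrac{1}{2}\,\lVert r_m(x)\rVert^{2} \;+\; \tfrac{1}{2}\,\log\lvert 2\pi\Sigma_m\rvert .

The constant \tfrac{1}{2}\log\lvert 2\pi\Sigma_m\rvert cancels across components when all noise models are equal, which is why the other tests simply pass 0.0; with differing sigmas it must be kept, and CreateFactorGraph supplies it via ComputeLogNormalizer (up to the library's factor-of-two convention for error).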
/* ************************************************************************* */
/**
 * @brief Test components with differing means but the same covariances.
 * The factor graph is
 * *-X1-*-X2
 *      |
 *      M1
 */
TEST(HybridNonlinearFactorGraph, DifferentMeans) {
  using namespace test_relinearization;

  DiscreteKey m1(M(1), 2);

  Values values;
  double x0 = 0.0, x1 = 1.75;
  values.insert(X(0), x0);
  values.insert(X(1), x1);

  std::vector<double> means = {0.0, 2.0}, sigmas = {1e-0, 1e-0};

  HybridNonlinearFactorGraph hfg = CreateFactorGraph(means, sigmas, m1, x0);

  {
    auto bn = hfg.linearize(values)->eliminateSequential();
    HybridValues actual = bn->optimize();

    HybridValues expected(
        VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(-1.75)}},
        DiscreteValues{{M(1), 0}});

    EXPECT(assert_equal(expected, actual));

    DiscreteValues dv0{{M(1), 0}};
    VectorValues cont0 = bn->optimize(dv0);
    double error0 = bn->error(HybridValues(cont0, dv0));

    // TODO(Varun) Perform importance sampling to estimate error?

    // regression
    EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9);

    DiscreteValues dv1{{M(1), 1}};
    VectorValues cont1 = bn->optimize(dv1);
    double error1 = bn->error(HybridValues(cont1, dv1));
    EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9);
  }

  {
    // Add measurement on x1
    auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);
    hfg.push_back(PriorFactor<double>(X(1), means[1], prior_noise));

    auto bn = hfg.linearize(values)->eliminateSequential();
    HybridValues actual = bn->optimize();

    HybridValues expected(
        VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(0.25)}},
        DiscreteValues{{M(1), 1}});

    EXPECT(assert_equal(expected, actual));

    {
      DiscreteValues dv{{M(1), 0}};
      VectorValues cont = bn->optimize(dv);
      double error = bn->error(HybridValues(cont, dv));
      // regression
      EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9);
    }
    {
      DiscreteValues dv{{M(1), 1}};
      VectorValues cont = bn->optimize(dv);
      double error = bn->error(HybridValues(cont, dv));
      // regression
      EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9);
    }
  }
}

/* ************************************************************************* */
/**
 * @brief Test components with differing covariances but the same means.
 * The factor graph is
 * *-X1-*-X2
 *      |
 *      M1
 */
TEST_DISABLED(HybridNonlinearFactorGraph, DifferentCovariances) {
  using namespace test_relinearization;

  DiscreteKey m1(M(1), 2);

  Values values;
  double x0 = 1.0, x1 = 1.0;
  values.insert(X(0), x0);
  values.insert(X(1), x1);

  std::vector<double> means = {0.0, 0.0}, sigmas = {1e2, 1e-2};

  // Create FG with HybridNonlinearFactor and prior on X1
  HybridNonlinearFactorGraph hfg = CreateFactorGraph(means, sigmas, m1, x0);
  // Linearize and eliminate
  auto hbn = hfg.linearize(values)->eliminateSequential();

  VectorValues cv;
  cv.insert(X(0), Vector1(0.0));
  cv.insert(X(1), Vector1(0.0));

  // Check the error values at the MLE point μ.
  AlgebraicDecisionTree<Key> errorTree = hbn->errorTree(cv);

  DiscreteValues dv0{{M(1), 0}};
  DiscreteValues dv1{{M(1), 1}};

  // regression
  EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9);
  EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9);

  DiscreteConditional expected_m1(m1, "0.5/0.5");
  DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete());

  EXPECT(assert_equal(expected_m1, actual_m1));
}

/* ************************************************************************* */
int main() {
  TestResult tr;
  return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */
@ -439,9 +439,10 @@ TEST(HybridNonlinearISAM, NonTrivial) {
                                                      noise_model),
       moving = std::make_shared<PlanarMotionModel>(W(0), W(1), odometry,
                                                    noise_model);
  std::vector<PlanarMotionModel::shared_ptr> components = {moving, still};
  std::vector<std::pair<PlanarMotionModel::shared_ptr, double>> components = {
      {moving, 0.0}, {still, 0.0}};
  auto mixtureFactor = std::make_shared<HybridNonlinearFactor>(
      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
      contKeys, gtsam::DiscreteKey(M(1), 2), components);
  fg.push_back(mixtureFactor);

  // Add equivalent of ImuFactor
@ -479,9 +480,9 @@ TEST(HybridNonlinearISAM, NonTrivial) {
                                                      noise_model);
  moving =
      std::make_shared<PlanarMotionModel>(W(1), W(2), odometry, noise_model);
  components = {moving, still};
  components = {{moving, 0.0}, {still, 0.0}};
  mixtureFactor = std::make_shared<HybridNonlinearFactor>(
      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components);
      contKeys, gtsam::DiscreteKey(M(2), 2), components);
  fg.push_back(mixtureFactor);

  // Add equivalent of ImuFactor
@ -522,9 +523,9 @@ TEST(HybridNonlinearISAM, NonTrivial) {
                                                      noise_model);
  moving =
      std::make_shared<PlanarMotionModel>(W(2), W(3), odometry, noise_model);
  components = {moving, still};
  components = {{moving, 0.0}, {still, 0.0}};
  mixtureFactor = std::make_shared<HybridNonlinearFactor>(
      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components);
      contKeys, gtsam::DiscreteKey(M(3), 2), components);
  fg.push_back(mixtureFactor);

  // Add equivalent of ImuFactor
@ -76,16 +76,16 @@ BOOST_CLASS_EXPORT_GUID(HybridBayesNet, "gtsam_HybridBayesNet");
// Test HybridGaussianFactor serialization.
TEST(HybridSerialization, HybridGaussianFactor) {
  KeyVector continuousKeys{X(0)};
  DiscreteKeys discreteKeys{{M(0), 2}};
  DiscreteKey discreteKey{M(0), 2};

  auto A = Matrix::Zero(2, 1);
  auto b0 = Matrix::Zero(2, 1);
  auto b1 = Matrix::Ones(2, 1);
  auto f0 = std::make_shared<JacobianFactor>(X(0), A, b0);
  auto f1 = std::make_shared<JacobianFactor>(X(0), A, b1);
  std::vector<GaussianFactor::shared_ptr> factors{f0, f1};
  std::vector<GaussianFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};

  const HybridGaussianFactor factor(continuousKeys, discreteKeys, factors);
  const HybridGaussianFactor factor(continuousKeys, discreteKey, factors);

  EXPECT(equalsObj<HybridGaussianFactor>(factor));
  EXPECT(equalsXML<HybridGaussianFactor>(factor));
@ -116,7 +116,8 @@ TEST(HybridSerialization, HybridGaussianConditional) {
  const auto conditional1 = std::make_shared<GaussianConditional>(
      GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3));
  const HybridGaussianConditional gm({Z(0)}, {X(0)}, {mode},
                                     {conditional0, conditional1});
                                     HybridGaussianConditional::Conditionals(
                                         {mode}, {conditional0, conditional1}));

  EXPECT(equalsObj<HybridGaussianConditional>(gm));
  EXPECT(equalsXML<HybridGaussianConditional>(gm));
@ -707,6 +707,25 @@ const RobustModel::shared_ptr &robust, const NoiseModel::shared_ptr noise){
}

/* ************************************************************************* */
} // namespace noiseModel

/* *******************************************************************************/
double ComputeLogNormalizer(
    const noiseModel::Gaussian::shared_ptr& noise_model) {
  // Since noise models are Gaussian, we can get the logDeterminant using
  // the same trick as in GaussianConditional:
  // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1}
  // log det(Sigma) = -log(det(R'R)) = -2*log(det(R))
  // Hence, log det(Sigma) = -2.0 * logDetR()
  double logDetR = noise_model->R()
                       .diagonal()
                       .unaryExpr([](double x) { return log(x); })
                       .sum();
  double logDeterminantSigma = -2.0 * logDetR;

  size_t n = noise_model->dim();
  constexpr double log2pi = 1.8378770664093454835606594728112;
  return n * log2pi + logDeterminantSigma;
}

} // gtsam
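Restating the derivation in the comments above compactly, since R is upper-triangular with det(R) equal to the product of its diagonal:

  \log\lvert 2\pi\Sigma\rvert = n\log(2\pi) + \log\det\Sigma,
  \qquad
  \Sigma = (R^{\top}R)^{-1} \;\Rightarrow\; \log\det\Sigma = -2\sum_{i=1}^{n}\log R_{ii},

which is exactly n * log2pi + logDeterminantSigma, with logDetR the sum of logs over the diagonal of R.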
@ -751,6 +751,18 @@ namespace gtsam {
template<> struct traits<noiseModel::Isotropic> : public Testable<noiseModel::Isotropic> {};
template<> struct traits<noiseModel::Unit> : public Testable<noiseModel::Unit> {};

/**
 * @brief Helper function to compute the log(|2πΣ|) normalizer value
 * for a Gaussian noise model.
 * We compute this in log space for numerical accuracy.
 *
 * @param noise_model The Gaussian noise model
 * whose normalizer we wish to compute.
 * @return double
 */
GTSAM_EXPORT double ComputeLogNormalizer(
    const noiseModel::Gaussian::shared_ptr& noise_model);

} // namespace gtsam
@ -807,6 +807,26 @@ TEST(NoiseModel, NonDiagonalGaussian)
  }
}

TEST(NoiseModel, ComputeLogNormalizer) {
  // Very simple 1D noise model, which we can compute by hand.
  double sigma = 0.1;
  auto noise_model = Isotropic::Sigma(1, sigma);
  double actual_value = ComputeLogNormalizer(noise_model);
  // Compute log(|2πΣ|) by hand.
  // = log(2π) + log(σ²) (since it is 1D)
  constexpr double log2pi = 1.8378770664093454835606594728112;
  double expected_value = log2pi + log(sigma * sigma);
  EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);

  // Similar situation in the 3D case
  size_t n = 3;
  auto noise_model2 = Isotropic::Sigma(n, sigma);
  double actual_value2 = ComputeLogNormalizer(noise_model2);
  // Every term scales by n, since det(Σ) = (σ²)^n for an isotropic model.
  double expected_value2 = n * (log2pi + log(sigma * sigma));
  EXPECT_DOUBLES_EQUAL(expected_value2, actual_value2, 1e-9);
}
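A worked check of the 1D case, for review convenience (simple arithmetic, not part of the diff): with σ = 0.1,

  \log(2\pi) + \log(\sigma^{2}) = 1.8378771\ldots + \log(0.01)
                                = 1.8378771\ldots - 4.6051702\ldots
                                \approx -2.7672931,

and the isotropic 3D value is exactly three times this, since \det\Sigma = (\sigma^{2})^{3}.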

/* ************************************************************************* */
int main() { TestResult tr; return TestRegistry::runAllTests(tr); }
/* ************************************************************************* */
@ -43,14 +43,12 @@ class TestHybridBayesNet(GtsamTestCase):
        # Create the conditionals
        conditional0 = GaussianConditional(X(1), [5], I_1x1, model0)
        conditional1 = GaussianConditional(X(1), [2], I_1x1, model1)
        discrete_keys = DiscreteKeys()
        discrete_keys.push_back(Asia)

        # Create hybrid Bayes net.
        bayesNet = HybridBayesNet()
        bayesNet.push_back(conditional)
        bayesNet.push_back(
            HybridGaussianConditional([X(1)], [], discrete_keys,
            HybridGaussianConditional([X(1)], [], Asia,
                                      [conditional0, conditional1]))
        bayesNet.push_back(DiscreteConditional(Asia, "99/1"))
@ -20,7 +20,7 @@ import gtsam
from gtsam import (DiscreteConditional, DiscreteKeys, GaussianConditional,
                   HybridBayesNet, HybridGaussianConditional,
                   HybridGaussianFactor, HybridGaussianFactorGraph,
                   HybridValues, JacobianFactor, Ordering, noiseModel)
                   HybridValues, JacobianFactor, noiseModel)

DEBUG_MARGINALS = False
@ -31,13 +31,11 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
    def test_create(self):
        """Test construction of hybrid factor graph."""
        model = noiseModel.Unit.Create(3)
        dk = DiscreteKeys()
        dk.push_back((C(0), 2))

        jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model)
        jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model)

        gmf = HybridGaussianFactor([X(0)], dk, [jf1, jf2])
        gmf = HybridGaussianFactor([X(0)], (C(0), 2), [(jf1, 0), (jf2, 0)])

        hfg = HybridGaussianFactorGraph()
        hfg.push_back(jf1)
@ -58,13 +56,11 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
    def test_optimize(self):
        """Test construction of hybrid factor graph."""
        model = noiseModel.Unit.Create(3)
        dk = DiscreteKeys()
        dk.push_back((C(0), 2))

        jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model)
        jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model)

        gmf = HybridGaussianFactor([X(0)], dk, [jf1, jf2])
        gmf = HybridGaussianFactor([X(0)], (C(0), 2), [(jf1, 0), (jf2, 0)])

        hfg = HybridGaussianFactorGraph()
        hfg.push_back(jf1)
@ -96,8 +92,6 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):

        # Create Gaussian mixture Z(0) = X(0) + noise for each measurement.
        I_1x1 = np.eye(1)
        keys = DiscreteKeys()
        keys.push_back(mode)
        for i in range(num_measurements):
            conditional0 = GaussianConditional.FromMeanAndStddev(Z(i),
                                                                 I_1x1,
@ -108,7 +102,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
                                                                 X(0), [0],
                                                                 sigma=3)
            bayesNet.push_back(
                HybridGaussianConditional([Z(i)], [X(0)], keys,
                HybridGaussianConditional([Z(i)], [X(0)], mode,
                                          [conditional0, conditional1]))

        # Create prior on X(0).
@ -27,21 +27,18 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):

    def test_nonlinear_hybrid(self):
        nlfg = gtsam.HybridNonlinearFactorGraph()
        dk = gtsam.DiscreteKeys()
        dk.push_back((10, 2))
        nlfg.push_back(
            BetweenFactorPoint3(1, 2, Point3(1, 2, 3),
                                noiseModel.Diagonal.Variances([1, 1, 1])))
        nlfg.push_back(
            PriorFactorPoint3(2, Point3(1, 2, 3),
                              noiseModel.Diagonal.Variances([0.5, 0.5, 0.5])))
        nlfg.push_back(
            gtsam.HybridNonlinearFactor([1], dk, [
                PriorFactorPoint3(1, Point3(0, 0, 0),
                                  noiseModel.Unit.Create(3)),
                PriorFactorPoint3(1, Point3(1, 2, 1),
                                  noiseModel.Unit.Create(3))
            ]))

        factors = [(PriorFactorPoint3(1, Point3(0, 0, 0),
                                      noiseModel.Unit.Create(3)), 0.0),
                   (PriorFactorPoint3(1, Point3(1, 2, 1),
                                      noiseModel.Unit.Create(3)), 0.0)]
        nlfg.push_back(gtsam.HybridNonlinearFactor([1], (10, 2), factors))
        nlfg.push_back(gtsam.DecisionTreeFactor((10, 2), "1 3"))
        values = gtsam.Values()
        values.insert_point3(1, Point3(0, 0, 0))