From cb226633c59236856a50ae962219656c8eb73e8b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 23 Oct 2023 18:24:25 -0400 Subject: [PATCH 001/398] IterativeSolver equals method --- gtsam/linear/IterativeSolver.cpp | 6 ++++++ gtsam/linear/IterativeSolver.h | 16 +++++++++------- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/gtsam/linear/IterativeSolver.cpp b/gtsam/linear/IterativeSolver.cpp index c5a25c4b8..e9b0a5f13 100644 --- a/gtsam/linear/IterativeSolver.cpp +++ b/gtsam/linear/IterativeSolver.cpp @@ -46,6 +46,12 @@ void IterativeOptimizationParameters::print(ostream &os) const { << verbosityTranslator(verbosity_) << endl; } +/*****************************************************************************/ +bool IterativeOptimizationParameters::equals( + const IterativeOptimizationParameters &other, double tol) const { + return verbosity_ == other.verbosity(); +} + /*****************************************************************************/ ostream& operator<<(ostream &os, const IterativeOptimizationParameters &p) { p.print(os); diff --git a/gtsam/linear/IterativeSolver.h b/gtsam/linear/IterativeSolver.h index 0441cd9da..1a66708d4 100644 --- a/gtsam/linear/IterativeSolver.h +++ b/gtsam/linear/IterativeSolver.h @@ -41,15 +41,14 @@ class VectorValues; * parameters for iterative linear solvers */ class IterativeOptimizationParameters { - -public: - + public: typedef std::shared_ptr shared_ptr; - enum Verbosity { - SILENT = 0, COMPLEXITY, ERROR - } verbosity_; + enum Verbosity { SILENT = 0, COMPLEXITY, ERROR }; -public: + protected: + Verbosity verbosity_; + + public: IterativeOptimizationParameters(Verbosity v = SILENT) : verbosity_(v) { @@ -71,6 +70,9 @@ public: /* virtual print function */ GTSAM_EXPORT virtual void print(std::ostream &os) const; + GTSAM_EXPORT virtual bool equals(const IterativeOptimizationParameters &other, + double tol = 1e-9) const; + /* for serialization */ GTSAM_EXPORT friend std::ostream &operator<<( std::ostream &os, 
const IterativeOptimizationParameters &p); From 48b270efc57fdde5d0726a050c0689a151d996eb Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 23 Oct 2023 18:24:41 -0400 Subject: [PATCH 002/398] improved equals method for NonlinearOptimizerParams --- gtsam/nonlinear/NonlinearOptimizerParams.h | 24 ++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/gtsam/nonlinear/NonlinearOptimizerParams.h b/gtsam/nonlinear/NonlinearOptimizerParams.h index cafb235bc..92e22e064 100644 --- a/gtsam/nonlinear/NonlinearOptimizerParams.h +++ b/gtsam/nonlinear/NonlinearOptimizerParams.h @@ -115,14 +115,22 @@ public: virtual void print(const std::string& str = "") const; bool equals(const NonlinearOptimizerParams& other, double tol = 1e-9) const { - return maxIterations == other.getMaxIterations() - && std::abs(relativeErrorTol - other.getRelativeErrorTol()) <= tol - && std::abs(absoluteErrorTol - other.getAbsoluteErrorTol()) <= tol - && std::abs(errorTol - other.getErrorTol()) <= tol - && verbosityTranslator(verbosity) == other.getVerbosity(); - // && orderingType.equals(other.getOrderingType()_; - // && linearSolverType == other.getLinearSolverType(); - // TODO: check ordering, iterativeParams, and iterationsHook + // Check for equality of shared ptrs + bool iterative_params_equal = false; + if (iterativeParams == other.iterativeParams) { + iterative_params_equal = true; + } + if (iterativeParams && other.iterativeParams) { + iterative_params_equal = iterativeParams->equals(*other.iterativeParams); + } + + return maxIterations == other.getMaxIterations() && + std::abs(relativeErrorTol - other.getRelativeErrorTol()) <= tol && + std::abs(absoluteErrorTol - other.getAbsoluteErrorTol()) <= tol && + std::abs(errorTol - other.getErrorTol()) <= tol && + verbosityTranslator(verbosity) == other.getVerbosity() && + orderingType == other.orderingType && ordering == other.ordering && + linearSolverType == other.linearSolverType && iterative_params_equal; } inline 
bool isMultifrontal() const { From 19542053ccb9047085aa5282108efe8835492828 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 15 Nov 2023 10:43:33 -0500 Subject: [PATCH 003/398] better formatting for LM Optimizer summary --- gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp index c49e3a65a..99682fb77 100644 --- a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp +++ b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp @@ -236,9 +236,9 @@ bool LevenbergMarquardtOptimizer::tryLambda(const GaussianFactorGraph& linear, if (currentState->iterations == 0) { cout << "iter cost cost_change lambda success iter_time" << endl; } - cout << setw(4) << currentState->iterations << " " << setw(8) << newError << " " << setw(3) << setprecision(2) - << costChange << " " << setw(3) << setprecision(2) << currentState->lambda << " " << setw(4) - << systemSolvedSuccessfully << " " << setw(3) << setprecision(2) << iterationTime << endl; + cout << setw(4) << currentState->iterations << " " << setw(12) << newError << " " << setw(12) << setprecision(2) + << costChange << " " << setw(10) << setprecision(2) << currentState->lambda << " " << setw(6) + << systemSolvedSuccessfully << " " << setw(10) << setprecision(2) << iterationTime << endl; } if (step_is_successful) { // we have successfully decreased the cost and we have good modelFidelity From 7695fd6de3fc00cf7d348dc2f30cd6e6f4548f1a Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 20 Nov 2023 13:24:39 -0500 Subject: [PATCH 004/398] Improved HybridBayesNet::optimize with proper model selection --- gtsam/hybrid/HybridBayesNet.cpp | 60 +++++++++++++++++++++++++++++++-- 1 file changed, 57 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 31177ddb7..ba869a6f5 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ 
b/gtsam/hybrid/HybridBayesNet.cpp @@ -220,15 +220,69 @@ GaussianBayesNet HybridBayesNet::choose( /* ************************************************************************* */ HybridValues HybridBayesNet::optimize() const { // Collect all the discrete factors to compute MPE - DiscreteBayesNet discrete_bn; + DiscreteFactorGraph discrete_fg; + VectorValues continuousValues; + for (auto &&conditional : *this) { if (conditional->isDiscrete()) { - discrete_bn.push_back(conditional->asDiscrete()); + discrete_fg.push_back(conditional->asDiscrete()); + } else { + /* + Perform the integration of L(X;M, Z)P(X|M) which is the model selection + term. + TODO(Varun) Write better comments detailing the whole process. + */ + if (conditional->isContinuous()) { + auto gc = conditional->asGaussian(); + for (GaussianConditional::const_iterator frontal = gc->beginFrontals(); + frontal != gc->endFrontals(); ++frontal) { + continuousValues.insert_or_assign(*frontal, + Vector::Zero(gc->getDim(frontal))); + } + } else if (conditional->isHybrid()) { + auto gm = conditional->asMixture(); + gm->conditionals().apply( + [&continuousValues](const GaussianConditional::shared_ptr &gc) { + if (gc) { + for (GaussianConditional::const_iterator frontal = gc->begin(); + frontal != gc->end(); ++frontal) { + continuousValues.insert_or_assign( + *frontal, Vector::Zero(gc->getDim(frontal))); + } + } + return gc; + }); + + DecisionTree error = gm->error(continuousValues); + + // Functional to take error and compute the probability + auto integrate = [&gm](const double &error) { + // q(mu; M, Z) = exp(-error) + // k = 1.0 / sqrt((2*pi)^n*det(Sigma)) + // thus, q*sqrt(|2*pi*Sigma|) = q/k = exp(log(q) - log(k)) + // = exp(-error - log(k)) + double prob = std::exp(-error - gm->logNormalizationConstant()); + if (prob > 1e-12) { + return prob; + } else { + return 1.0; + } + }; + AlgebraicDecisionTree model_selection = + DecisionTree(error, integrate); + + std::cout << "\n\nmodel selection"; + 
model_selection.print("", DefaultKeyFormatter); + discrete_fg.push_back( + DecisionTreeFactor(gm->discreteKeys(), model_selection)); + } } } // Solve for the MPE - DiscreteValues mpe = DiscreteFactorGraph(discrete_bn).optimize(); + discrete_fg.print(); + DiscreteValues mpe = discrete_fg.optimize(); + mpe.print("mpe"); // Given the MPE, compute the optimal continuous values. return HybridValues(optimize(mpe), mpe); From 39f7ac20a1d8a0bdaff47d31dd61331188a5a14c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 20 Nov 2023 13:28:05 -0500 Subject: [PATCH 005/398] handle nullptrs in GaussianMixture::error --- gtsam/hybrid/GaussianMixture.cpp | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 753e35bf0..a1ba167f5 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -316,8 +316,15 @@ AlgebraicDecisionTree GaussianMixture::logProbability( AlgebraicDecisionTree GaussianMixture::error( const VectorValues &continuousValues) const { auto errorFunc = [&](const GaussianConditional::shared_ptr &conditional) { - return conditional->error(continuousValues) + // - logConstant_ - conditional->logNormalizationConstant(); + // Check if valid pointer + if (conditional) { + return conditional->error(continuousValues) + // + logConstant_ - conditional->logNormalizationConstant(); + } else { + // If not valid, pointer, it means this conditional was pruned, + // so we return maximum error.
+ return std::numeric_limits::max(); + } }; DecisionTree errorTree(conditionals_, errorFunc); return errorTree; From c374a26b45ed1f25c597ad08d12594ee4414282e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 20 Nov 2023 13:31:07 -0500 Subject: [PATCH 006/398] nicer HybridBayesNet::optimize with normalized errors --- gtsam/hybrid/HybridBayesNet.cpp | 75 ++++++++++++++++++++++----------- 1 file changed, 51 insertions(+), 24 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index ba869a6f5..cd1157576 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -223,22 +223,38 @@ HybridValues HybridBayesNet::optimize() const { DiscreteFactorGraph discrete_fg; VectorValues continuousValues; + // Error values for each hybrid factor + AlgebraicDecisionTree error(0.0); + std::set discreteKeySet; + for (auto &&conditional : *this) { if (conditional->isDiscrete()) { discrete_fg.push_back(conditional->asDiscrete()); } else { /* - Perform the integration of L(X;M, Z)P(X|M) which is the model selection - term. - TODO(Varun) Write better comments detailing the whole process. + Perform the integration of L(X;M,Z)P(X|M) + which is the model selection term. + + By Bayes' rule, P(X|M) ∝ L(X;M,Z)P(X|M), + hence L(X;M,Z)P(X|M) is the unnormalized probabilty of + the joint Gaussian distribution. + + This can be computed by multiplying all the exponentiaed errors + of each of the conditionals, which we do below in hybrid case. */ if (conditional->isContinuous()) { + /* + If we are here, it means there are no discrete variables in + the Bayes net (due to strong elimination ordering). + This is a continuous-only problem hence model selection doesn't matter. 
+ */ auto gc = conditional->asGaussian(); for (GaussianConditional::const_iterator frontal = gc->beginFrontals(); frontal != gc->endFrontals(); ++frontal) { continuousValues.insert_or_assign(*frontal, Vector::Zero(gc->getDim(frontal))); } + } else if (conditional->isHybrid()) { auto gm = conditional->asMixture(); gm->conditionals().apply( @@ -253,36 +269,47 @@ HybridValues HybridBayesNet::optimize() const { return gc; }); - DecisionTree error = gm->error(continuousValues); + /* + To perform model selection, we need: + q(mu; M, Z) * sqrt((2*pi)^n*det(Sigma)) - // Functional to take error and compute the probability - auto integrate = [&gm](const double &error) { - // q(mu; M, Z) = exp(-error) - // k = 1.0 / sqrt((2*pi)^n*det(Sigma)) - // thus, q*sqrt(|2*pi*Sigma|) = q/k = exp(log(q) - log(k)) - // = exp(-error - log(k)) - double prob = std::exp(-error - gm->logNormalizationConstant()); - if (prob > 1e-12) { - return prob; - } else { - return 1.0; - } + If q(mu; M, Z) = exp(-error) & k = 1.0 / sqrt((2*pi)^n*det(Sigma)) + thus, q*sqrt(|2*pi*Sigma|) = q/k = exp(log(q/k)) + = exp(log(q) - log(k)) = exp(-error - log(k)) + = exp(-(error + log(k))) + + So let's compute (error + log(k)) and exponentiate later + */ + error = error + gm->error(continuousValues); + + // Add the logNormalization constant to the error + // Also compute the mean for normalization (for numerical stability) + double mean = 0.0; + auto addConstant = [&gm, &mean](const double &error) { + double e = error + gm->logNormalizationConstant(); + mean += e; + return e; }; - AlgebraicDecisionTree model_selection = - DecisionTree(error, integrate); + error = error.apply(addConstant); + // Normalize by the mean + error = error.apply([&mean](double x) { return x / mean; }); - std::cout << "\n\nmodel selection"; - model_selection.print("", DefaultKeyFormatter); - discrete_fg.push_back( - DecisionTreeFactor(gm->discreteKeys(), model_selection)); + // Include the discrete keys + 
std::copy(gm->discreteKeys().begin(), gm->discreteKeys().end(), + std::inserter(discreteKeySet, discreteKeySet.end())); } } } + AlgebraicDecisionTree model_selection = DecisionTree( + error, [](const double &error) { return std::exp(-error); }); + + discrete_fg.push_back(DecisionTreeFactor( + DiscreteKeys(discreteKeySet.begin(), discreteKeySet.end()), + model_selection)); + // Solve for the MPE - discrete_fg.print(); DiscreteValues mpe = discrete_fg.optimize(); - mpe.print("mpe"); // Given the MPE, compute the optimal continuous values. return HybridValues(optimize(mpe), mpe); From 50670da07ceeb2947ca75d15cd3b52a6fd0adbb9 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 12 Dec 2023 05:57:23 -0500 Subject: [PATCH 007/398] HybridValues formatting --- gtsam/hybrid/HybridValues.h | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/gtsam/hybrid/HybridValues.h b/gtsam/hybrid/HybridValues.h index 40d4b5e92..09e7d773c 100644 --- a/gtsam/hybrid/HybridValues.h +++ b/gtsam/hybrid/HybridValues.h @@ -12,6 +12,7 @@ /** * @file HybridValues.h * @date Jul 28, 2022 + * @author Varun Agrawal * @author Shangjie Xue */ @@ -54,13 +55,13 @@ class GTSAM_EXPORT HybridValues { HybridValues() = default; /// Construct from DiscreteValues and VectorValues. - HybridValues(const VectorValues &cv, const DiscreteValues &dv) - : continuous_(cv), discrete_(dv){} + HybridValues(const VectorValues& cv, const DiscreteValues& dv) + : continuous_(cv), discrete_(dv) {} /// Construct from all values types. HybridValues(const VectorValues& cv, const DiscreteValues& dv, const Values& v) - : continuous_(cv), discrete_(dv), nonlinear_(v){} + : continuous_(cv), discrete_(dv), nonlinear_(v) {} /// @} /// @name Testable @@ -101,9 +102,7 @@ class GTSAM_EXPORT HybridValues { bool existsDiscrete(Key j) { return (discrete_.find(j) != discrete_.end()); } /// Check whether a variable with key \c j exists in values. 
- bool existsNonlinear(Key j) { - return nonlinear_.exists(j); - } + bool existsNonlinear(Key j) { return nonlinear_.exists(j); } /// Check whether a variable with key \c j exists. bool exists(Key j) { @@ -128,9 +127,7 @@ class GTSAM_EXPORT HybridValues { } /// insert_or_assign() , similar to Values.h - void insert_or_assign(Key j, size_t value) { - discrete_[j] = value; - } + void insert_or_assign(Key j, size_t value) { discrete_[j] = value; } /** Insert all continuous values from \c values. Throws an invalid_argument * exception if any keys to be inserted are already used. */ From af490e9ffcbab044637f9a9b1885cfc4e751332c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 12 Dec 2023 06:26:57 -0500 Subject: [PATCH 008/398] sum and normalize helper methods for the AlgebraicDecisionTree --- gtsam/discrete/AlgebraicDecisionTree.h | 19 +++++++++++++++++++ gtsam/hybrid/HybridBayesNet.cpp | 13 +++++++------ 2 files changed, 26 insertions(+), 6 deletions(-) diff --git a/gtsam/discrete/AlgebraicDecisionTree.h b/gtsam/discrete/AlgebraicDecisionTree.h index 9f55f3b63..1e5dc848b 100644 --- a/gtsam/discrete/AlgebraicDecisionTree.h +++ b/gtsam/discrete/AlgebraicDecisionTree.h @@ -196,6 +196,25 @@ namespace gtsam { return this->apply(g, &Ring::div); } + /// Compute sum of all values + double sum() const { + double sum = 0; + auto visitor = [&](int y) { sum += y; }; + this->visit(visitor); + return sum; + } + + /** + * @brief Helper method to perform normalization such that all leaves in the + * tree sum to 1 + * + * @param sum + * @return AlgebraicDecisionTree + */ + AlgebraicDecisionTree normalize(double sum) const { + return this->apply([&sum](const double& x) { return x / sum; }); + } + /** sum out variable */ AlgebraicDecisionTree sum(const L& label, size_t cardinality) const { return this->combine(label, cardinality, &Ring::add); diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index cd1157576..eea51d329 100644 --- 
a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -283,16 +283,17 @@ HybridValues HybridBayesNet::optimize() const { error = error + gm->error(continuousValues); // Add the logNormalization constant to the error - // Also compute the mean for normalization (for numerical stability) - double mean = 0.0; - auto addConstant = [&gm, &mean](const double &error) { + // Also compute the sum for discrete probability normalization + // (normalization trick for numerical stability) + double sum = 0.0; + auto addConstant = [&gm, &sum](const double &error) { double e = error + gm->logNormalizationConstant(); - mean += e; + sum += e; return e; }; error = error.apply(addConstant); - // Normalize by the mean - error = error.apply([&mean](double x) { return x / mean; }); + // Normalize by the sum + error = error.normalize(sum); // Include the discrete keys std::copy(gm->discreteKeys().begin(), gm->discreteKeys().end(), From c004bd8df0775bdb14f6df46334545cf1ac03b90 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 12 Dec 2023 19:15:24 -0500 Subject: [PATCH 009/398] test for differing covariances --- gtsam/hybrid/tests/testMixtureFactor.cpp | 70 ++++++++++++++++++++++++ 1 file changed, 70 insertions(+) diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index 67a7fd8ae..50bf2d4b4 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -18,6 +18,9 @@ #include #include +#include +#include +#include #include #include #include @@ -114,6 +117,73 @@ TEST(MixtureFactor, Dim) { EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); } +/* ************************************************************************* */ +// Test components with differing covariances +TEST(MixtureFactor, DifferentCovariances) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 1.0, x2 = 1.0; + values.insert(X(1), x1); + values.insert(X(2), x2); + + double between = 0.0; + + auto model0 = 
noiseModel::Isotropic::Sigma(1, 1e2); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + auto f0 = + std::make_shared>(X(1), X(2), between, model0); + auto f1 = + std::make_shared>(X(1), X(2), between, model1); + std::vector factors{f0, f1}; + + // Create via toFactorGraph + using symbol_shorthand::Z; + Matrix H0_1, H0_2, H1_1, H1_2; + Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); + std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H0_1 /*Sp1*/}, + {X(2), H0_2 /*Tp2*/}}; + + Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); + std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H1_1 /*Sp1*/}, + {X(2), H1_2 /*Tp2*/}}; + auto gm = new gtsam::GaussianMixture( + {Z(1)}, {X(1), X(2)}, {m1}, + {std::make_shared(terms0, 1, -d0, model0), + std::make_shared(terms1, 1, -d1, model1)}); + gtsam::HybridBayesNet bn2; + bn2.emplace_back(gm); + + gtsam::VectorValues measurements; + measurements.insert(Z(1), gtsam::Z_1x1); + // Create FG with single GaussianMixtureFactor + auto mixture_fg = bn2.toFactorGraph(measurements); + + // Linearized prior factor on X1 + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + mixture_fg.push_back(prior); + + auto hbn = mixture_fg.eliminateSequential(); + // hbn->print("\n\nfinal bayes net"); + + HybridValues actual_values = hbn->optimize(); + + VectorValues cv; + cv.insert(X(1), Vector1(0.0)); + cv.insert(X(2), Vector1(0.0)); + DiscreteValues dv; + dv.insert({M(1), 1}); + HybridValues expected_values(cv, dv); + + EXPECT(assert_equal(expected_values, actual_values)); +} + /* ************************************************************************* */ int main() { TestResult tr; From 7b56c96b430cf9d828016749c3299366a8a67b0e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 15 Dec 2023 15:19:48 -0500 Subject: [PATCH 010/398] differing means test --- gtsam/hybrid/tests/testMixtureFactor.cpp | 45 ++++++++++++++++++++++++ 1 
file changed, 45 insertions(+) diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index 50bf2d4b4..4bb716540 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -117,6 +117,51 @@ TEST(MixtureFactor, Dim) { EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); } +/* ************************************************************************* */ +// Test components with differing means +TEST(MixtureFactor, DifferentMeans) { + DiscreteKey m1(M(1), 2), m2(M(2), 2); + + Values values; + double x1 = 0.0, x2 = 1.75, x3 = 2.60; + values.insert(X(1), x1); + values.insert(X(2), x2); + values.insert(X(3), x3); + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0); + + auto f0 = std::make_shared>(X(1), X(2), 0.0, model0); + auto f1 = std::make_shared>(X(1), X(2), 2.0, model1); + std::vector factors{f0, f1}; + + MixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + HybridNonlinearFactorGraph hnfg; + hnfg.push_back(mixtureFactor); + + f0 = std::make_shared>(X(2), X(3), 0.0, model0); + f1 = std::make_shared>(X(2), X(3), 2.0, model1); + std::vector factors23{f0, f1}; + hnfg.push_back(MixtureFactor({X(2), X(3)}, {m2}, factors23)); + + auto prior = PriorFactor(X(1), x1, prior_noise); + hnfg.push_back(prior); + + hnfg.emplace_shared>(X(2), 2.0, prior_noise); + + auto hgfg = hnfg.linearize(values); + auto bn = hgfg->eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{ + {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}}, + DiscreteValues{{M(1), 1}, {M(2), 0}}); + + EXPECT(assert_equal(expected, actual)); +} + /* ************************************************************************* */ // Test components with differing covariances TEST(MixtureFactor, DifferentCovariances) { From 
e549a9b41fb6980e4fb1fbcf89df40b927797d9c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 15 Dec 2023 15:21:25 -0500 Subject: [PATCH 011/398] normalize model selection term --- gtsam/hybrid/HybridBayesNet.cpp | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index eea51d329..f2cac485d 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -239,7 +239,7 @@ HybridValues HybridBayesNet::optimize() const { hence L(X;M,Z)P(X|M) is the unnormalized probabilty of the joint Gaussian distribution. - This can be computed by multiplying all the exponentiaed errors + This can be computed by multiplying all the exponentiated errors of each of the conditionals, which we do below in hybrid case. */ if (conditional->isContinuous()) { @@ -288,7 +288,7 @@ HybridValues HybridBayesNet::optimize() const { double sum = 0.0; auto addConstant = [&gm, &sum](const double &error) { double e = error + gm->logNormalizationConstant(); - sum += e; + sum += std::abs(e); return e; }; error = error.apply(addConstant); @@ -302,12 +302,17 @@ HybridValues HybridBayesNet::optimize() const { } } + double min_log = error.min(); AlgebraicDecisionTree model_selection = DecisionTree( - error, [](const double &error) { return std::exp(-error); }); + error, [&min_log](const double &x) { return std::exp(-(x - min_log)); }); + model_selection = model_selection + exp(-min_log); - discrete_fg.push_back(DecisionTreeFactor( - DiscreteKeys(discreteKeySet.begin(), discreteKeySet.end()), - model_selection)); + // Only add model_selection if we have discrete keys + if (discreteKeySet.size() > 0) { + discrete_fg.push_back(DecisionTreeFactor( + DiscreteKeys(discreteKeySet.begin(), discreteKeySet.end()), + model_selection)); + } // Solve for the MPE DiscreteValues mpe = discrete_fg.optimize(); From b2638c8698689545c0b0825a0a7fd3d1bd7fc0ba Mon Sep 17 00:00:00 2001 From: Varun Agrawal 
Date: Sun, 17 Dec 2023 15:14:03 -0500 Subject: [PATCH 012/398] max and min functions for AlgebraicDecisionTree --- gtsam/discrete/AlgebraicDecisionTree.h | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/gtsam/discrete/AlgebraicDecisionTree.h b/gtsam/discrete/AlgebraicDecisionTree.h index 1e5dc848b..1a6358680 100644 --- a/gtsam/discrete/AlgebraicDecisionTree.h +++ b/gtsam/discrete/AlgebraicDecisionTree.h @@ -199,7 +199,7 @@ namespace gtsam { /// Compute sum of all values double sum() const { double sum = 0; - auto visitor = [&](int y) { sum += y; }; + auto visitor = [&](double y) { sum += y; }; this->visit(visitor); return sum; } @@ -215,6 +215,22 @@ namespace gtsam { return this->apply([&sum](const double& x) { return x / sum; }); } + /// Find the minimum values amongst all leaves + double min() const { + double min = std::numeric_limits::max(); + auto visitor = [&](double x) { min = x < min ? x : min; }; + this->visit(visitor); + return min; + } + + /// Find the maximum values amongst all leaves + double max() const { + double max = std::numeric_limits::min(); + auto visitor = [&](double x) { max = x > max ? 
x : max; }; + this->visit(visitor); + return max; + } + /** sum out variable */ AlgebraicDecisionTree sum(const L& label, size_t cardinality) const { return this->combine(label, cardinality, &Ring::add); From 6f09be51cbf145590b3ba545f59fdbc5aa9c1718 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 17 Dec 2023 18:55:15 -0500 Subject: [PATCH 013/398] error normalization and log-sum-exp trick --- gtsam/hybrid/HybridBayesNet.cpp | 7 ++++--- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 23 ++++++++++++++++------ 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index f2cac485d..e48b3faf7 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -303,9 +303,10 @@ HybridValues HybridBayesNet::optimize() const { } double min_log = error.min(); - AlgebraicDecisionTree model_selection = DecisionTree( - error, [&min_log](const double &x) { return std::exp(-(x - min_log)); }); - model_selection = model_selection + exp(-min_log); + AlgebraicDecisionTree model_selection = + DecisionTree(error, [&min_log](const double &x) { + return std::exp(-(x - min_log)) * exp(-min_log); + }); // Only add model_selection if we have discrete keys if (discreteKeySet.size() > 0) { diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 467cff710..dfff3d4f3 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -323,18 +323,29 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // If there are no more continuous parents, then we create a // DiscreteFactor here, with the error for each discrete choice. - // Integrate the probability mass in the last continuous conditional using - // the unnormalized probability q(μ;m) = exp(-error(μ;m)) at the mean. 
- // discrete_probability = exp(-error(μ;m)) * sqrt(det(2π Σ_m)) - auto probability = [&](const Result &pair) -> double { + // Compute the probability q(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m)) + // from the residual error at the mean μ. + // The residual error contains no keys, and only depends on the discrete + // separator if present. + auto logProbability = [&](const Result &pair) -> double { + // auto probability = [&](const Result &pair) -> double { static const VectorValues kEmpty; // If the factor is not null, it has no keys, just contains the residual. const auto &factor = pair.second; if (!factor) return 1.0; // TODO(dellaert): not loving this. - return exp(-factor->error(kEmpty)) / pair.first->normalizationConstant(); + + // exp(-factor->error(kEmpty)) / pair.first->normalizationConstant(); + return -factor->error(kEmpty) - pair.first->logNormalizationConstant(); }; - DecisionTree probabilities(eliminationResults, probability); + AlgebraicDecisionTree logProbabilities( + DecisionTree(eliminationResults, logProbability)); + + // Perform normalization + double max_log = logProbabilities.max(); + DecisionTree probabilities( + logProbabilities, + [&max_log](const double x) { return exp(x - max_log) * exp(max_log); }); return { std::make_shared(gaussianMixture), From 36604297d7db6f8a0c299f782a157c72f1a70cf9 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 18 Dec 2023 14:25:19 -0500 Subject: [PATCH 014/398] handle numerical instability --- gtsam/discrete/AlgebraicDecisionTree.h | 3 ++- gtsam/hybrid/HybridBayesNet.cpp | 28 ++++++++++----------- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 7 +++--- gtsam/hybrid/tests/testHybridEstimation.cpp | 1 - gtsam/linear/GaussianConditional.cpp | 6 +++-- 5 files changed, 24 insertions(+), 21 deletions(-) diff --git a/gtsam/discrete/AlgebraicDecisionTree.h b/gtsam/discrete/AlgebraicDecisionTree.h index 1a6358680..17385a975 100644 --- a/gtsam/discrete/AlgebraicDecisionTree.h +++ 
b/gtsam/discrete/AlgebraicDecisionTree.h @@ -225,7 +225,8 @@ namespace gtsam { /// Find the maximum values amongst all leaves double max() const { - double max = std::numeric_limits::min(); + // Get the most negative value + double max = -std::numeric_limits::max(); auto visitor = [&](double x) { max = x > max ? x : max; }; this->visit(visitor); return max; diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index e48b3faf7..2b0b11e36 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -274,26 +274,26 @@ HybridValues HybridBayesNet::optimize() const { q(mu; M, Z) * sqrt((2*pi)^n*det(Sigma)) If q(mu; M, Z) = exp(-error) & k = 1.0 / sqrt((2*pi)^n*det(Sigma)) - thus, q*sqrt(|2*pi*Sigma|) = q/k = exp(log(q/k)) + thus, q * sqrt((2*pi)^n*det(Sigma)) = q/k = exp(log(q/k)) = exp(log(q) - log(k)) = exp(-error - log(k)) = exp(-(error + log(k))) - So let's compute (error + log(k)) and exponentiate later + So we compute (error + log(k)) and exponentiate later */ - error = error + gm->error(continuousValues); - // Add the logNormalization constant to the error + // Add the error and the logNormalization constant to the error + auto err = gm->error(continuousValues) + gm->logNormalizationConstant(); + // Also compute the sum for discrete probability normalization // (normalization trick for numerical stability) double sum = 0.0; - auto addConstant = [&gm, &sum](const double &error) { - double e = error + gm->logNormalizationConstant(); + auto absSum = [&sum](const double &e) { sum += std::abs(e); return e; }; - error = error.apply(addConstant); - // Normalize by the sum - error = error.normalize(sum); + err.visit(absSum); + // Normalize by the sum to prevent overflow + error = error + err.normalize(sum); // Include the discrete keys std::copy(gm->discreteKeys().begin(), gm->discreteKeys().end(), @@ -302,11 +302,11 @@ HybridValues HybridBayesNet::optimize() const { } } - double min_log = error.min(); - 
AlgebraicDecisionTree model_selection = - DecisionTree(error, [&min_log](const double &x) { - return std::exp(-(x - min_log)) * exp(-min_log); - }); + error = error * -1; + double max_log = error.max(); + AlgebraicDecisionTree model_selection = DecisionTree( + error, [&max_log](const double &x) { return std::exp(x - max_log); }); + model_selection = model_selection.normalize(model_selection.sum()); // Only add model_selection if we have discrete keys if (discreteKeySet.size() > 0) { diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index dfff3d4f3..260dc6bbe 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -328,7 +328,6 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // The residual error contains no keys, and only depends on the discrete // separator if present. auto logProbability = [&](const Result &pair) -> double { - // auto probability = [&](const Result &pair) -> double { static const VectorValues kEmpty; // If the factor is not null, it has no keys, just contains the residual. 
const auto &factor = pair.second; @@ -343,9 +342,11 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // Perform normalization double max_log = logProbabilities.max(); - DecisionTree probabilities( + AlgebraicDecisionTree probabilities = DecisionTree( logProbabilities, - [&max_log](const double x) { return exp(x - max_log) * exp(max_log); }); + [&max_log](const double x) { return exp(x - max_log); }); + // probabilities.print("", DefaultKeyFormatter); + probabilities = probabilities.normalize(probabilities.sum()); return { std::make_shared(gaussianMixture), diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index b8edc39d8..1cc28b386 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -333,7 +333,6 @@ TEST(HybridEstimation, Probability) { for (auto discrete_conditional : *discreteBayesNet) { bayesNet->add(discrete_conditional); } - auto discreteConditional = discreteBayesNet->at(0)->asDiscrete(); HybridValues hybrid_values = bayesNet->optimize(); diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index 0112835aa..4ec1d8b95 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -184,8 +184,10 @@ namespace gtsam { double GaussianConditional::logNormalizationConstant() const { constexpr double log2pi = 1.8378770664093454835606594728112; size_t n = d().size(); - // log det(Sigma)) = - 2.0 * logDeterminant() - return - 0.5 * n * log2pi + logDeterminant(); + // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1} + // log det(Sigma) = -log(det(R'R)) = -2*log(det(R)) + // Hence, log det(Sigma)) = - 2.0 * logDeterminant() + return -0.5 * n * log2pi + logDeterminant(); } /* ************************************************************************* */ From 07ddec5f4b4aac614bc1730022b130c99b59a944 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 21 Dec 2023 
14:19:21 -0500 Subject: [PATCH 015/398] remove stray comment --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 260dc6bbe..d63f8b5ff 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -345,7 +345,6 @@ hybridElimination(const HybridGaussianFactorGraph &factors, AlgebraicDecisionTree probabilities = DecisionTree( logProbabilities, [&max_log](const double x) { return exp(x - max_log); }); - // probabilities.print("", DefaultKeyFormatter); probabilities = probabilities.normalize(probabilities.sum()); return { From c6584f63cea0f2638d2a04a76573cf971ac75c2a Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 22 Dec 2023 14:55:31 -0500 Subject: [PATCH 016/398] minor test cleanup --- gtsam/hybrid/tests/testMixtureFactor.cpp | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index 4bb716540..4819458e8 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -202,20 +202,19 @@ TEST(MixtureFactor, DifferentCovariances) { {Z(1)}, {X(1), X(2)}, {m1}, {std::make_shared(terms0, 1, -d0, model0), std::make_shared(terms1, 1, -d1, model1)}); - gtsam::HybridBayesNet bn2; - bn2.emplace_back(gm); + gtsam::HybridBayesNet bn; + bn.emplace_back(gm); gtsam::VectorValues measurements; measurements.insert(Z(1), gtsam::Z_1x1); // Create FG with single GaussianMixtureFactor - auto mixture_fg = bn2.toFactorGraph(measurements); + HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); // Linearized prior factor on X1 auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); mixture_fg.push_back(prior); auto hbn = mixture_fg.eliminateSequential(); - // hbn->print("\n\nfinal bayes net"); HybridValues actual_values = 
hbn->optimize(); From ebcf958d691f1be867900bb5c0a45b24ad36eff9 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 25 Dec 2023 16:04:48 -0500 Subject: [PATCH 017/398] better, more correct version of model selection --- gtsam/hybrid/HybridBayesNet.cpp | 208 ++++++++++++++++++++++---------- 1 file changed, 142 insertions(+), 66 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 2b0b11e36..23a1c7787 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -26,6 +26,18 @@ static std::mt19937_64 kRandomNumberGenerator(42); namespace gtsam { +using std::dynamic_pointer_cast; + +/* ************************************************************************ */ +// Throw a runtime exception for method specified in string s, +// and conditional f: +static void throwRuntimeError(const std::string &s, + const std::shared_ptr &f) { + auto &fr = *f; + throw std::runtime_error(s + " not implemented for conditional type " + + demangle(typeid(fr).name()) + "."); +} + /* ************************************************************************* */ void HybridBayesNet::print(const std::string &s, const KeyFormatter &formatter) const { @@ -217,6 +229,56 @@ GaussianBayesNet HybridBayesNet::choose( return gbn; } +/* ************************************************************************ */ +static GaussianBayesNetTree addGaussian( + const GaussianBayesNetTree &gfgTree, + const GaussianConditional::shared_ptr &factor) { + // If the decision tree is not initialized, then initialize it. 
+ if (gfgTree.empty()) { + GaussianBayesNet result{factor}; + return GaussianBayesNetTree(result); + } else { + auto add = [&factor](const GaussianBayesNet &graph) { + auto result = graph; + result.push_back(factor); + return result; + }; + return gfgTree.apply(add); + } +} + +/* ************************************************************************ */ +GaussianBayesNetTree HybridBayesNet::assembleTree() const { + GaussianBayesNetTree result; + + for (auto &f : factors_) { + // TODO(dellaert): just use a virtual method defined in HybridFactor. + if (auto gm = dynamic_pointer_cast(f)) { + result = gm->add(result); + } else if (auto hc = dynamic_pointer_cast(f)) { + if (auto gm = hc->asMixture()) { + result = gm->add(result); + } else if (auto g = hc->asGaussian()) { + result = addGaussian(result, g); + } else { + // Has to be discrete. + // TODO(dellaert): in C++20, we can use std::visit. + continue; + } + } else if (dynamic_pointer_cast(f)) { + // Don't do anything for discrete-only factors + // since we want to evaluate continuous values only. + continue; + } else { + // We need to handle the case where the object is actually an + // BayesTreeOrphanWrapper! + throwRuntimeError("HybridBayesNet::assembleTree", f); + } + } + + return result; +} + /* ************************************************************************* */ HybridValues HybridBayesNet::optimize() const { // Collect all the discrete factors to compute MPE @@ -227,74 +289,94 @@ HybridValues HybridBayesNet::optimize() const { AlgebraicDecisionTree error(0.0); std::set discreteKeySet; + // this->print(); + GaussianBayesNetTree bnTree = assembleTree(); + // bnTree.print("", DefaultKeyFormatter, [](const GaussianBayesNet &gbn) { + // gbn.print(); + // return ""; + // }); + /* + Perform the integration of L(X;M,Z)P(X|M) + which is the model selection term. + + By Bayes' rule, P(X|M,Z) ∝ L(X;M,Z)P(X|M), + hence L(X;M,Z)P(X|M) is the unnormalized probabilty of + the joint Gaussian distribution. 
+ + This can be computed by multiplying all the exponentiated errors + of each of the conditionals, which we do below in hybrid case. + */ + /* + To perform model selection, we need: + q(mu; M, Z) * sqrt((2*pi)^n*det(Sigma)) + + If q(mu; M, Z) = exp(-error) & k = 1.0 / sqrt((2*pi)^n*det(Sigma)) + thus, q * sqrt((2*pi)^n*det(Sigma)) = q/k = exp(log(q/k)) + = exp(log(q) - log(k)) = exp(-error - log(k)) + = exp(-(error + log(k))) + + So we compute (error + log(k)) and exponentiate later + */ + // Compute the X* of each assignment and use that as the MAP. + DecisionTree x_map( + bnTree, [](const GaussianBayesNet &gbn) { return gbn.optimize(); }); + + // Only compute logNormalizationConstant for now + AlgebraicDecisionTree log_norm_constants = + DecisionTree(bnTree, [](const GaussianBayesNet &gbn) { + if (gbn.size() == 0) { + return -std::numeric_limits::max(); + } + return -gbn.logNormalizationConstant(); + }); + // Compute unnormalized error term and compute model selection term + AlgebraicDecisionTree model_selection_term = log_norm_constants.apply( + [this, &x_map](const Assignment &assignment, double x) { + double error = 0.0; + for (auto &&f : *this) { + if (auto gm = dynamic_pointer_cast(f)) { + error += gm->error( + HybridValues(x_map(assignment), DiscreteValues(assignment))); + } else if (auto hc = dynamic_pointer_cast(f)) { + if (auto gm = hc->asMixture()) { + error += gm->error( + HybridValues(x_map(assignment), DiscreteValues(assignment))); + } else if (auto g = hc->asGaussian()) { + error += g->error(x_map(assignment)); + } + } + } + return -(error + x); + }); + // model_selection_term.print("", DefaultKeyFormatter); + + double max_log = model_selection_term.max(); + AlgebraicDecisionTree model_selection = DecisionTree( + model_selection_term, + [&max_log](const double &x) { return std::exp(x - max_log); }); + model_selection = model_selection.normalize(model_selection.sum()); + // std::cout << "normalized model selection" << std::endl; + // 
model_selection.print("", DefaultKeyFormatter); + for (auto &&conditional : *this) { if (conditional->isDiscrete()) { discrete_fg.push_back(conditional->asDiscrete()); } else { - /* - Perform the integration of L(X;M,Z)P(X|M) - which is the model selection term. - - By Bayes' rule, P(X|M) ∝ L(X;M,Z)P(X|M), - hence L(X;M,Z)P(X|M) is the unnormalized probabilty of - the joint Gaussian distribution. - - This can be computed by multiplying all the exponentiated errors - of each of the conditionals, which we do below in hybrid case. - */ if (conditional->isContinuous()) { - /* - If we are here, it means there are no discrete variables in - the Bayes net (due to strong elimination ordering). - This is a continuous-only problem hence model selection doesn't matter. - */ - auto gc = conditional->asGaussian(); - for (GaussianConditional::const_iterator frontal = gc->beginFrontals(); - frontal != gc->endFrontals(); ++frontal) { - continuousValues.insert_or_assign(*frontal, - Vector::Zero(gc->getDim(frontal))); - } + // /* + // If we are here, it means there are no discrete variables in + // the Bayes net (due to strong elimination ordering). + // This is a continuous-only problem hence model selection doesn't matter. 
+ // */ + // auto gc = conditional->asGaussian(); + // for (GaussianConditional::const_iterator frontal = gc->beginFrontals(); + // frontal != gc->endFrontals(); ++frontal) { + // continuousValues.insert_or_assign(*frontal, + // Vector::Zero(gc->getDim(frontal))); + // } } else if (conditional->isHybrid()) { auto gm = conditional->asMixture(); - gm->conditionals().apply( - [&continuousValues](const GaussianConditional::shared_ptr &gc) { - if (gc) { - for (GaussianConditional::const_iterator frontal = gc->begin(); - frontal != gc->end(); ++frontal) { - continuousValues.insert_or_assign( - *frontal, Vector::Zero(gc->getDim(frontal))); - } - } - return gc; - }); - - /* - To perform model selection, we need: - q(mu; M, Z) * sqrt((2*pi)^n*det(Sigma)) - - If q(mu; M, Z) = exp(-error) & k = 1.0 / sqrt((2*pi)^n*det(Sigma)) - thus, q * sqrt((2*pi)^n*det(Sigma)) = q/k = exp(log(q/k)) - = exp(log(q) - log(k)) = exp(-error - log(k)) - = exp(-(error + log(k))) - - So we compute (error + log(k)) and exponentiate later - */ - - // Add the error and the logNormalization constant to the error - auto err = gm->error(continuousValues) + gm->logNormalizationConstant(); - - // Also compute the sum for discrete probability normalization - // (normalization trick for numerical stability) - double sum = 0.0; - auto absSum = [&sum](const double &e) { - sum += std::abs(e); - return e; - }; - err.visit(absSum); - // Normalize by the sum to prevent overflow - error = error + err.normalize(sum); - // Include the discrete keys std::copy(gm->discreteKeys().begin(), gm->discreteKeys().end(), std::inserter(discreteKeySet, discreteKeySet.end())); @@ -302,12 +384,6 @@ HybridValues HybridBayesNet::optimize() const { } } - error = error * -1; - double max_log = error.max(); - AlgebraicDecisionTree model_selection = DecisionTree( - error, [&max_log](const double &x) { return std::exp(x - max_log); }); - model_selection = model_selection.normalize(model_selection.sum()); - // Only add model_selection if 
we have discrete keys if (discreteKeySet.size() > 0) { discrete_fg.push_back(DecisionTreeFactor( From 1e298be3b3b09dbb996d40c8d2b7d0997f024508 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 25 Dec 2023 18:47:44 -0500 Subject: [PATCH 018/398] Better way of handling assignments --- gtsam/hybrid/HybridBayesNet.cpp | 76 +++++++++++++++++++-------------- 1 file changed, 43 insertions(+), 33 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 23a1c7787..89be86056 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -285,8 +285,6 @@ HybridValues HybridBayesNet::optimize() const { DiscreteFactorGraph discrete_fg; VectorValues continuousValues; - // Error values for each hybrid factor - AlgebraicDecisionTree error(0.0); std::set discreteKeySet; // this->print(); @@ -313,7 +311,8 @@ HybridValues HybridBayesNet::optimize() const { If q(mu; M, Z) = exp(-error) & k = 1.0 / sqrt((2*pi)^n*det(Sigma)) thus, q * sqrt((2*pi)^n*det(Sigma)) = q/k = exp(log(q/k)) = exp(log(q) - log(k)) = exp(-error - log(k)) - = exp(-(error + log(k))) + = exp(-(error + log(k))), + where error is computed at the corresponding MAP point, gbn.error(mu). 
So we compute (error + log(k)) and exponentiate later */ @@ -325,29 +324,45 @@ HybridValues HybridBayesNet::optimize() const { AlgebraicDecisionTree log_norm_constants = DecisionTree(bnTree, [](const GaussianBayesNet &gbn) { if (gbn.size() == 0) { - return -std::numeric_limits::max(); + return 0.0; } - return -gbn.logNormalizationConstant(); + return gbn.logNormalizationConstant(); }); - // Compute unnormalized error term and compute model selection term - AlgebraicDecisionTree model_selection_term = log_norm_constants.apply( - [this, &x_map](const Assignment &assignment, double x) { - double error = 0.0; - for (auto &&f : *this) { - if (auto gm = dynamic_pointer_cast(f)) { - error += gm->error( - HybridValues(x_map(assignment), DiscreteValues(assignment))); - } else if (auto hc = dynamic_pointer_cast(f)) { - if (auto gm = hc->asMixture()) { - error += gm->error( - HybridValues(x_map(assignment), DiscreteValues(assignment))); - } else if (auto g = hc->asGaussian()) { - error += g->error(x_map(assignment)); - } - } + + // Compute unnormalized error term + std::vector labels; + for (auto &&key : x_map.labels()) { + labels.push_back(std::make_pair(key, 2)); + } + + std::vector errors; + x_map.visitWith([this, &errors](const Assignment &assignment, + const VectorValues &mu) { + double error = 0.0; + for (auto &&f : *this) { + if (auto gm = dynamic_pointer_cast(f)) { + error += gm->error(HybridValues(mu, DiscreteValues(assignment))); + } else if (auto hc = dynamic_pointer_cast(f)) { + if (auto gm = hc->asMixture()) { + error += gm->error(HybridValues(mu, DiscreteValues(assignment))); + } else if (auto g = hc->asGaussian()) { + error += g->error(mu); } - return -(error + x); + } + } + errors.push_back(error); + }); + + AlgebraicDecisionTree errorTree = + DecisionTree(labels, errors); + + // Compute model selection term + AlgebraicDecisionTree model_selection_term = errorTree.apply( + [&log_norm_constants](const Assignment assignment, double err) { + return -(err + 
log_norm_constants(assignment)); }); + + // std::cout << "model selection term" << std::endl; // model_selection_term.print("", DefaultKeyFormatter); double max_log = model_selection_term.max(); @@ -355,6 +370,7 @@ HybridValues HybridBayesNet::optimize() const { model_selection_term, [&max_log](const double &x) { return std::exp(x - max_log); }); model_selection = model_selection.normalize(model_selection.sum()); + // std::cout << "normalized model selection" << std::endl; // model_selection.print("", DefaultKeyFormatter); @@ -363,17 +379,11 @@ HybridValues HybridBayesNet::optimize() const { discrete_fg.push_back(conditional->asDiscrete()); } else { if (conditional->isContinuous()) { - // /* - // If we are here, it means there are no discrete variables in - // the Bayes net (due to strong elimination ordering). - // This is a continuous-only problem hence model selection doesn't matter. - // */ - // auto gc = conditional->asGaussian(); - // for (GaussianConditional::const_iterator frontal = gc->beginFrontals(); - // frontal != gc->endFrontals(); ++frontal) { - // continuousValues.insert_or_assign(*frontal, - // Vector::Zero(gc->getDim(frontal))); - // } + /* + If we are here, it means there are no discrete variables in + the Bayes net (due to strong elimination ordering). + This is a continuous-only problem hence model selection doesn't matter. 
+ */ } else if (conditional->isHybrid()) { auto gm = conditional->asMixture(); From b4f07a01629f5ccb8d9413a82c9b9453627be28d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 25 Dec 2023 23:11:49 -0500 Subject: [PATCH 019/398] cleaner model selection computation --- gtsam/hybrid/HybridBayesNet.cpp | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 89be86056..90951b074 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -342,9 +342,11 @@ HybridValues HybridBayesNet::optimize() const { for (auto &&f : *this) { if (auto gm = dynamic_pointer_cast(f)) { error += gm->error(HybridValues(mu, DiscreteValues(assignment))); + } else if (auto hc = dynamic_pointer_cast(f)) { if (auto gm = hc->asMixture()) { error += gm->error(HybridValues(mu, DiscreteValues(assignment))); + } else if (auto g = hc->asGaussian()) { error += g->error(mu); } @@ -356,11 +358,9 @@ HybridValues HybridBayesNet::optimize() const { AlgebraicDecisionTree errorTree = DecisionTree(labels, errors); - // Compute model selection term - AlgebraicDecisionTree model_selection_term = errorTree.apply( - [&log_norm_constants](const Assignment assignment, double err) { - return -(err + log_norm_constants(assignment)); - }); + // Compute model selection term (with help from ADT methods) + AlgebraicDecisionTree model_selection_term = + (errorTree + log_norm_constants) * -1; // std::cout << "model selection term" << std::endl; // model_selection_term.print("", DefaultKeyFormatter); From 6f4343ca94aba06aff18fb3c019d18860af8f219 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 26 Dec 2023 00:20:44 -0500 Subject: [PATCH 020/398] almost working --- gtsam/hybrid/HybridBayesNet.cpp | 45 +++++++++++++++------------------ 1 file changed, 20 insertions(+), 25 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 90951b074..0ff4e342b 100644 --- 
a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -329,34 +329,29 @@ HybridValues HybridBayesNet::optimize() const { return gbn.logNormalizationConstant(); }); - // Compute unnormalized error term - std::vector labels; - for (auto &&key : x_map.labels()) { - labels.push_back(std::make_pair(key, 2)); - } + // Compute errors as VectorValues + DecisionTree errorVectors = x_map.apply( + [this](const Assignment &assignment, const VectorValues &mu) { + double error = 0.0; + for (auto &&f : *this) { + if (auto gm = dynamic_pointer_cast(f)) { + error += gm->error(HybridValues(mu, DiscreteValues(assignment))); - std::vector errors; - x_map.visitWith([this, &errors](const Assignment &assignment, - const VectorValues &mu) { - double error = 0.0; - for (auto &&f : *this) { - if (auto gm = dynamic_pointer_cast(f)) { - error += gm->error(HybridValues(mu, DiscreteValues(assignment))); + } else if (auto hc = dynamic_pointer_cast(f)) { + if (auto gm = hc->asMixture()) { + error += gm->error(HybridValues(mu, DiscreteValues(assignment))); - } else if (auto hc = dynamic_pointer_cast(f)) { - if (auto gm = hc->asMixture()) { - error += gm->error(HybridValues(mu, DiscreteValues(assignment))); - - } else if (auto g = hc->asGaussian()) { - error += g->error(mu); + } else if (auto g = hc->asGaussian()) { + error += g->error(mu); + } + } } - } - } - errors.push_back(error); - }); - - AlgebraicDecisionTree errorTree = - DecisionTree(labels, errors); + VectorValues e; + e.insert(0, Vector1(error)); + return e; + }); + AlgebraicDecisionTree errorTree = DecisionTree( + errorVectors, [](const VectorValues &v) { return v[0](0); }); // Compute model selection term (with help from ADT methods) AlgebraicDecisionTree model_selection_term = From 409938f4b493fde3c2f7fe84c6ba3b914a115d5a Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 26 Dec 2023 16:33:43 -0500 Subject: [PATCH 021/398] improved model selection code --- gtsam/hybrid/HybridBayesNet.cpp | 68 
+++++++++++++-------------------- gtsam/hybrid/HybridBayesNet.h | 2 + gtsam/hybrid/HybridFactor.h | 8 ++++ 3 files changed, 37 insertions(+), 41 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 0ff4e342b..027bd75d4 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -248,7 +248,7 @@ static GaussianBayesNetTree addGaussian( } /* ************************************************************************ */ -GaussianBayesNetTree HybridBayesNet::assembleTree() const { +GaussianBayesNetValTree HybridBayesNet::assembleTree() const { GaussianBayesNetTree result; for (auto &f : factors_) { @@ -276,23 +276,17 @@ GaussianBayesNetTree HybridBayesNet::assembleTree() const { } } - return result; + GaussianBayesNetValTree resultTree(result, [](const GaussianBayesNet &gbn) { + return std::make_pair(gbn, 0.0); + }); + return resultTree; } /* ************************************************************************* */ HybridValues HybridBayesNet::optimize() const { // Collect all the discrete factors to compute MPE DiscreteFactorGraph discrete_fg; - VectorValues continuousValues; - std::set discreteKeySet; - - // this->print(); - GaussianBayesNetTree bnTree = assembleTree(); - // bnTree.print("", DefaultKeyFormatter, [](const GaussianBayesNet &gbn) { - // gbn.print(); - // return ""; - // }); /* Perform the integration of L(X;M,Z)P(X|M) which is the model selection term. @@ -316,43 +310,35 @@ HybridValues HybridBayesNet::optimize() const { So we compute (error + log(k)) and exponentiate later */ - // Compute the X* of each assignment and use that as the MAP. 
- DecisionTree x_map( - bnTree, [](const GaussianBayesNet &gbn) { return gbn.optimize(); }); - // Only compute logNormalizationConstant for now - AlgebraicDecisionTree log_norm_constants = - DecisionTree(bnTree, [](const GaussianBayesNet &gbn) { + std::set discreteKeySet; + GaussianBayesNetValTree bnTree = assembleTree(); + + GaussianBayesNetValTree bn_error = bnTree.apply( + [this](const Assignment &assignment, + const std::pair &gbnAndValue) { + // Compute the X* of each assignment + VectorValues mu = gbnAndValue.first.optimize(); + // Compute the error for X* and the assignment + double error = + this->error(HybridValues(mu, DiscreteValues(assignment))); + + return std::make_pair(gbnAndValue.first, error); + }); + + auto trees = unzip(bn_error); + AlgebraicDecisionTree errorTree = trees.second; + + // Only compute logNormalizationConstant + AlgebraicDecisionTree log_norm_constants = DecisionTree( + bnTree, [](const std::pair &gbnAndValue) { + GaussianBayesNet gbn = gbnAndValue.first; if (gbn.size() == 0) { return 0.0; } return gbn.logNormalizationConstant(); }); - // Compute errors as VectorValues - DecisionTree errorVectors = x_map.apply( - [this](const Assignment &assignment, const VectorValues &mu) { - double error = 0.0; - for (auto &&f : *this) { - if (auto gm = dynamic_pointer_cast(f)) { - error += gm->error(HybridValues(mu, DiscreteValues(assignment))); - - } else if (auto hc = dynamic_pointer_cast(f)) { - if (auto gm = hc->asMixture()) { - error += gm->error(HybridValues(mu, DiscreteValues(assignment))); - - } else if (auto g = hc->asGaussian()) { - error += g->error(mu); - } - } - } - VectorValues e; - e.insert(0, Vector1(error)); - return e; - }); - AlgebraicDecisionTree errorTree = DecisionTree( - errorVectors, [](const VectorValues &v) { return v[0](0); }); - // Compute model selection term (with help from ADT methods) AlgebraicDecisionTree model_selection_term = (errorTree + log_norm_constants) * -1; diff --git a/gtsam/hybrid/HybridBayesNet.h 
b/gtsam/hybrid/HybridBayesNet.h index 22e03bba9..f0cdbaaf9 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -118,6 +118,8 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { return evaluate(values); } + GaussianBayesNetValTree assembleTree() const; + /** * @brief Solve the HybridBayesNet by first computing the MPE of all the * discrete variables and then optimizing the continuous variables based on diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index afd1c8032..8828a9172 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -33,6 +33,14 @@ class HybridValues; /// Alias for DecisionTree of GaussianFactorGraphs using GaussianFactorGraphTree = DecisionTree; +/// Alias for DecisionTree of GaussianBayesNets +using GaussianBayesNetTree = DecisionTree; +/** + * Alias for DecisionTree of (GaussianBayesNet, double) pairs. + * Used for model selection in BayesNet::optimize + */ +using GaussianBayesNetValTree = + DecisionTree>; KeyVector CollectKeys(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys); From 93c824c482babdb48cb7c856d906757c57841d3f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 27 Dec 2023 15:34:25 -0500 Subject: [PATCH 022/398] overload == operator for GaussianBayesNet and VectorValues --- gtsam/linear/GaussianBayesNet.h | 6 ++++++ gtsam/linear/VectorValues.h | 5 +++++ 2 files changed, 11 insertions(+) diff --git a/gtsam/linear/GaussianBayesNet.h b/gtsam/linear/GaussianBayesNet.h index 19781d1e6..d4d60845d 100644 --- a/gtsam/linear/GaussianBayesNet.h +++ b/gtsam/linear/GaussianBayesNet.h @@ -82,6 +82,12 @@ namespace gtsam { /** Check equality */ bool equals(const This& bn, double tol = 1e-9) const; + /// Check exact equality. 
+ friend bool operator==(const GaussianBayesNet& lhs, + const GaussianBayesNet& rhs) { + return lhs.isEqual(rhs); + } + /// print graph void print( const std::string& s = "", diff --git a/gtsam/linear/VectorValues.h b/gtsam/linear/VectorValues.h index 478cfd770..865fa3c8a 100644 --- a/gtsam/linear/VectorValues.h +++ b/gtsam/linear/VectorValues.h @@ -260,6 +260,11 @@ namespace gtsam { /** equals required by Testable for unit testing */ bool equals(const VectorValues& x, double tol = 1e-9) const; + /// Check exact equality. + friend bool operator==(const VectorValues& lhs, const VectorValues& rhs) { + return lhs.equals(rhs); + } + /// @{ /// @name Advanced Interface /// @{ From b20d33d79e8aaac6c0cbc76dcee4ba35180c93cb Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 27 Dec 2023 15:38:30 -0500 Subject: [PATCH 023/398] logNormalizationConstant() for GaussianBayesNet --- gtsam/linear/GaussianBayesNet.cpp | 20 ++++++++++++++++++++ gtsam/linear/GaussianBayesNet.h | 8 ++++++++ 2 files changed, 28 insertions(+) diff --git a/gtsam/linear/GaussianBayesNet.cpp b/gtsam/linear/GaussianBayesNet.cpp index b04878ac5..861e19cc9 100644 --- a/gtsam/linear/GaussianBayesNet.cpp +++ b/gtsam/linear/GaussianBayesNet.cpp @@ -243,5 +243,25 @@ namespace gtsam { } /* ************************************************************************* */ + double GaussianBayesNet::logNormalizationConstant() const { + /* + normalization constant = 1.0 / sqrt((2*pi)^n*det(Sigma)) + logConstant = -0.5 * n*log(2*pi) - 0.5 * log det(Sigma) + + log det(Sigma)) = -2.0 * logDeterminant() + thus, logConstant = -0.5*n*log(2*pi) + logDeterminant() + + BayesNet logConstant = sum(-0.5*n_i*log(2*pi) + logDeterminant_i()) + = sum(-0.5*n_i*log(2*pi)) + sum(logDeterminant_i()) + = sum(-0.5*n_i*log(2*pi)) + bn->logDeterminant() + */ + double logNormConst = 0.0; + for (const sharedConditional& cg : *this) { + logNormConst += cg->logNormalizationConstant(); + } + return logNormConst; + } + + /* 
************************************************************************* */ } // namespace gtsam diff --git a/gtsam/linear/GaussianBayesNet.h b/gtsam/linear/GaussianBayesNet.h index d4d60845d..ea1cb8603 100644 --- a/gtsam/linear/GaussianBayesNet.h +++ b/gtsam/linear/GaussianBayesNet.h @@ -234,6 +234,14 @@ namespace gtsam { * @return The determinant */ double logDeterminant() const; + /** + * @brief Get the log of the normalization constant corresponding to the + * joint Gaussian density represented by this Bayes net. + * + * @return double + */ + double logNormalizationConstant() const; + /** * Backsubstitute with a different RHS vector than the one stored in this BayesNet. * gy=inv(R*inv(Sigma))*gx From 3a89653e91b9c4fd09d047fef860bf5c3f9fb794 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 27 Dec 2023 15:45:35 -0500 Subject: [PATCH 024/398] helper methods in GaussianMixture for model selection --- gtsam/hybrid/GaussianMixture.cpp | 55 ++++++++++++++++++++++++++++++-- gtsam/hybrid/GaussianMixture.h | 15 +++++++++ 2 files changed, 68 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index a1ba167f5..61b40e566 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -24,6 +24,7 @@ #include #include #include +#include #include namespace gtsam { @@ -92,6 +93,34 @@ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { return {conditionals_, wrap}; } +/* *******************************************************************************/ +GaussianBayesNetTree GaussianMixture::add( + const GaussianBayesNetTree &sum) const { + using Y = GaussianBayesNet; + auto add = [](const Y &graph1, const Y &graph2) { + auto result = graph1; + if (graph2.size() == 0) { + return GaussianBayesNet(); + } + result.push_back(graph2); + return result; + }; + const auto tree = asGaussianBayesNetTree(); + return sum.empty() ? 
tree : sum.apply(tree, add); +} + +/* *******************************************************************************/ +GaussianBayesNetTree GaussianMixture::asGaussianBayesNetTree() const { + auto wrap = [](const GaussianConditional::shared_ptr &gc) { + if (gc) { + return GaussianBayesNet{gc}; + } else { + return GaussianBayesNet(); + } + }; + return {conditionals_, wrap}; +} + /* *******************************************************************************/ size_t GaussianMixture::nrComponents() const { size_t total = 0; @@ -332,10 +361,32 @@ AlgebraicDecisionTree GaussianMixture::error( /* *******************************************************************************/ double GaussianMixture::error(const HybridValues &values) const { + // Check if discrete keys in discrete assignment are + // present in the GaussianMixture + KeyVector dKeys = this->discreteKeys_.indices(); + bool valid_assignment = false; + for (auto &&kv : values.discrete()) { + if (std::find(dKeys.begin(), dKeys.end(), kv.first) != dKeys.end()) { + valid_assignment = true; + break; + } + } + + // The discrete assignment is not valid so we return 0.0 erorr. + if (!valid_assignment) { + return 0.0; + } + // Directly index to get the conditional, no need to build the whole tree. auto conditional = conditionals_(values.discrete()); - return conditional->error(values.continuous()) + // - logConstant_ - conditional->logNormalizationConstant(); + if (conditional) { + return conditional->error(values.continuous()) + // + logConstant_ - conditional->logNormalizationConstant(); + } else { + // If not valid, pointer, it means this conditional was pruned, + // so we return maximum error. 
+ return std::numeric_limits::max(); + } } /* *******************************************************************************/ diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index 0b68fcfd0..a52cd6c82 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -71,6 +71,12 @@ class GTSAM_EXPORT GaussianMixture */ GaussianFactorGraphTree asGaussianFactorGraphTree() const; + /** + * @brief Convert a DecisionTree of conditionals into + * a DT of Gaussian Bayes nets. + */ + GaussianBayesNetTree asGaussianBayesNetTree() const; + /** * @brief Helper function to get the pruner functor. * @@ -248,6 +254,15 @@ class GTSAM_EXPORT GaussianMixture * @return GaussianFactorGraphTree */ GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const; + + /** + * @brief Merge the Gaussian Bayes Nets in `this` and `sum` while + * maintaining the decision tree structure. + * + * @param sum Decision Tree of Gaussian Bayes Nets + * @return GaussianBayesNetTree + */ + GaussianBayesNetTree add(const GaussianBayesNetTree &sum) const; /// @} private: From 6f66d04f1425219f51b284c4b4094a15dc2a5791 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 27 Dec 2023 15:46:31 -0500 Subject: [PATCH 025/398] handle pruning in model selection --- gtsam/hybrid/HybridBayesNet.cpp | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 027bd75d4..0352d7962 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -319,6 +319,13 @@ HybridValues HybridBayesNet::optimize() const { const std::pair &gbnAndValue) { // Compute the X* of each assignment VectorValues mu = gbnAndValue.first.optimize(); + + // mu is empty if gbn had nullptrs + if (mu.size() == 0) { + return std::make_pair(gbnAndValue.first, + std::numeric_limits::max()); + } + // Compute the error for X* and the assignment double error = 
this->error(HybridValues(mu, DiscreteValues(assignment))); @@ -343,18 +350,12 @@ HybridValues HybridBayesNet::optimize() const { AlgebraicDecisionTree model_selection_term = (errorTree + log_norm_constants) * -1; - // std::cout << "model selection term" << std::endl; - // model_selection_term.print("", DefaultKeyFormatter); - double max_log = model_selection_term.max(); AlgebraicDecisionTree model_selection = DecisionTree( model_selection_term, [&max_log](const double &x) { return std::exp(x - max_log); }); model_selection = model_selection.normalize(model_selection.sum()); - // std::cout << "normalized model selection" << std::endl; - // model_selection.print("", DefaultKeyFormatter); - for (auto &&conditional : *this) { if (conditional->isDiscrete()) { discrete_fg.push_back(conditional->asDiscrete()); From 0d058100e57d187de3b7c4638addf21da1c8158b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 3 Jan 2024 14:44:12 -0500 Subject: [PATCH 026/398] update wrapper for LM with Ordering parameter --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 2 ++ gtsam/nonlinear/nonlinear.i | 11 ++++++++--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index d63f8b5ff..522dc626b 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -333,6 +333,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, const auto &factor = pair.second; if (!factor) return 1.0; // TODO(dellaert): not loving this. 
+ // Logspace version of: // exp(-factor->error(kEmpty)) / pair.first->normalizationConstant(); return -factor->error(kEmpty) - pair.first->logNormalizationConstant(); }; @@ -345,6 +346,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, AlgebraicDecisionTree probabilities = DecisionTree( logProbabilities, [&max_log](const double x) { return exp(x - max_log); }); + // probabilities.print("", DefaultKeyFormatter); probabilities = probabilities.normalize(probabilities.sum()); return { diff --git a/gtsam/nonlinear/nonlinear.i b/gtsam/nonlinear/nonlinear.i index 3f5fb1dd5..113f08e0f 100644 --- a/gtsam/nonlinear/nonlinear.i +++ b/gtsam/nonlinear/nonlinear.i @@ -378,17 +378,22 @@ typedef gtsam::GncOptimizer> G #include virtual class LevenbergMarquardtOptimizer : gtsam::NonlinearOptimizer { LevenbergMarquardtOptimizer(const gtsam::NonlinearFactorGraph& graph, - const gtsam::Values& initialValues); + const gtsam::Values& initialValues, + const gtsam::LevenbergMarquardtParams& params = + gtsam::LevenbergMarquardtParams()); LevenbergMarquardtOptimizer(const gtsam::NonlinearFactorGraph& graph, const gtsam::Values& initialValues, - const gtsam::LevenbergMarquardtParams& params); + const gtsam::Ordering& ordering, + const gtsam::LevenbergMarquardtParams& params = + gtsam::LevenbergMarquardtParams()); + double lambda() const; void print(string s = "") const; }; #include class ISAM2GaussNewtonParams { - ISAM2GaussNewtonParams(); + ISAM2GaussNewtonParams(double _wildfireThreshold = 0.001); void print(string s = "") const; From 651f99925b98a0bd5cb531927af3f3ef7f9c6af2 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 3 Jan 2024 16:13:02 -0500 Subject: [PATCH 027/398] print logNormalizationConstant for Gaussian conditionals --- gtsam/hybrid/GaussianMixture.cpp | 2 ++ .../tests/testHybridNonlinearFactorGraph.cpp | 16 ++++++++++++++++ gtsam/linear/GaussianConditional.cpp | 1 + gtsam/linear/tests/testGaussianConditional.cpp | 3 +++ 4 files changed, 22 insertions(+) 
diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 61b40e566..b7840429b 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -174,6 +174,8 @@ void GaussianMixture::print(const std::string &s, std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), "; } std::cout << "\n"; + std::cout << " logNormalizationConstant: " << logConstant_ << "\n" + << std::endl; conditionals_.print( "", [&](Key k) { return formatter(k); }, [&](const GaussianConditional::shared_ptr &gf) -> std::string { diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 93081d309..1dcd0bbb1 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -675,33 +675,41 @@ factor 6: P( m1 | m0 ): size: 3 conditional 0: Hybrid P( x0 | x1 m0) Discrete Keys = (m0, 2), + logNormalizationConstant: 1.38862 + Choice(m0) 0 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.85087 ] + logNormalizationConstant: 1.38862 No noise model 1 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.95037 ] + logNormalizationConstant: 1.38862 No noise model conditional 1: Hybrid P( x1 | x2 m0 m1) Discrete Keys = (m0, 2), (m1, 2), + logNormalizationConstant: 1.3935 + Choice(m1) 0 Choice(m0) 0 0 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.99901 ] + logNormalizationConstant: 1.3935 No noise model 0 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.90098 ] + logNormalizationConstant: 1.3935 No noise model 1 Choice(m0) @@ -709,16 +717,20 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10.098 ] + logNormalizationConstant: 1.3935 No noise model 1 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10 ] + logNormalizationConstant: 1.3935 No noise model conditional 2: Hybrid P( x2 | m0 m1) Discrete Keys = (m0, 
2), (m1, 2), + logNormalizationConstant: 1.38857 + Choice(m1) 0 Choice(m0) 0 0 Leaf p(x2) @@ -726,6 +738,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.1489 ] mean: 1 elements x2: -1.0099 + logNormalizationConstant: 1.38857 No noise model 0 1 Leaf p(x2) @@ -733,6 +746,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.1479 ] mean: 1 elements x2: -1.0098 + logNormalizationConstant: 1.38857 No noise model 1 Choice(m0) @@ -741,6 +755,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.0504 ] mean: 1 elements x2: -1.0001 + logNormalizationConstant: 1.38857 No noise model 1 1 Leaf p(x2) @@ -748,6 +763,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.0494 ] mean: 1 elements x2: -1 + logNormalizationConstant: 1.38857 No noise model )"; diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index 4ec1d8b95..fb7058282 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -121,6 +121,7 @@ namespace gtsam { const auto mean = solve({}); // solve for mean. 
mean.print(" mean", formatter); } + cout << " logNormalizationConstant: " << logNormalizationConstant() << std::endl; if (model_) model_->print(" Noise model: "); else diff --git a/gtsam/linear/tests/testGaussianConditional.cpp b/gtsam/linear/tests/testGaussianConditional.cpp index a4a722012..dcd821889 100644 --- a/gtsam/linear/tests/testGaussianConditional.cpp +++ b/gtsam/linear/tests/testGaussianConditional.cpp @@ -516,6 +516,7 @@ TEST(GaussianConditional, Print) { " d = [ 20 40 ]\n" " mean: 1 elements\n" " x0: 20 40\n" + " logNormalizationConstant: -4.0351\n" "isotropic dim=2 sigma=3\n"; EXPECT(assert_print_equal(expected, conditional, "GaussianConditional")); @@ -530,6 +531,7 @@ TEST(GaussianConditional, Print) { " S[x1] = [ -1 -2 ]\n" " [ -3 -4 ]\n" " d = [ 20 40 ]\n" + " logNormalizationConstant: -4.0351\n" "isotropic dim=2 sigma=3\n"; EXPECT(assert_print_equal(expected1, conditional1, "GaussianConditional")); @@ -545,6 +547,7 @@ TEST(GaussianConditional, Print) { " S[y1] = [ -5 -6 ]\n" " [ -7 -8 ]\n" " d = [ 20 40 ]\n" + " logNormalizationConstant: -4.0351\n" "isotropic dim=2 sigma=3\n"; EXPECT(assert_print_equal(expected2, conditional2, "GaussianConditional")); } From 114c86fc6bd1f81c57d5c9653cf15f8dd39596c8 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 3 Jan 2024 16:15:37 -0500 Subject: [PATCH 028/398] GaussianConditional wrapper for arbitrary number of keys --- gtsam/linear/linear.i | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/gtsam/linear/linear.i b/gtsam/linear/linear.i index d4a045f09..6862646ae 100644 --- a/gtsam/linear/linear.i +++ b/gtsam/linear/linear.i @@ -489,12 +489,17 @@ virtual class GaussianConditional : gtsam::JacobianFactor { GaussianConditional(size_t key, Vector d, Matrix R, size_t name1, Matrix S, size_t name2, Matrix T, const gtsam::noiseModel::Diagonal* sigmas); + GaussianConditional(const vector> terms, + size_t nrFrontals, Vector d, + const gtsam::noiseModel::Diagonal* sigmas); // Constructors with no noise model 
GaussianConditional(size_t key, Vector d, Matrix R); GaussianConditional(size_t key, Vector d, Matrix R, size_t name1, Matrix S); GaussianConditional(size_t key, Vector d, Matrix R, size_t name1, Matrix S, size_t name2, Matrix T); + GaussianConditional(const gtsam::KeyVector& keys, size_t nrFrontals, + const gtsam::VerticalBlockMatrix& augmentedMatrix); // Named constructors static gtsam::GaussianConditional FromMeanAndStddev(gtsam::Key key, From 82e0c0dae10e58e5a15d3930baeb031585e7946f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 3 Jan 2024 16:26:05 -0500 Subject: [PATCH 029/398] take comment all the way --- gtsam/linear/GaussianConditional.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index 4ec1d8b95..d59070aaf 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -186,7 +186,10 @@ namespace gtsam { size_t n = d().size(); // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1} // log det(Sigma) = -log(det(R'R)) = -2*log(det(R)) - // Hence, log det(Sigma)) = - 2.0 * logDeterminant() + // Hence, log det(Sigma)) = -2.0 * logDeterminant() + // which gives log = -0.5*n*log(2*pi) - 0.5*(-2.0 * logDeterminant()) + // = -0.5*n*log(2*pi) + (0.5*2.0 * logDeterminant()) + // = -0.5*n*log(2*pi) + logDeterminant() return -0.5 * n * log2pi + logDeterminant(); } From 8a61c49bb3ca447af3d7bac86c1694a73c0b398e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 3 Jan 2024 16:32:21 -0500 Subject: [PATCH 030/398] add model_selection method to HybridBayesNet --- gtsam/hybrid/HybridBayesNet.cpp | 50 ++++++++++++++++----------------- gtsam/hybrid/HybridBayesNet.h | 13 +++++++++ 2 files changed, 37 insertions(+), 26 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 0352d7962..81f4badea 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ 
-283,35 +283,20 @@ GaussianBayesNetValTree HybridBayesNet::assembleTree() const { } /* ************************************************************************* */ -HybridValues HybridBayesNet::optimize() const { - // Collect all the discrete factors to compute MPE - DiscreteFactorGraph discrete_fg; - +AlgebraicDecisionTree HybridBayesNet::model_selection() const { /* - Perform the integration of L(X;M,Z)P(X|M) - which is the model selection term. + To perform model selection, we need: + q(mu; M, Z) * sqrt((2*pi)^n*det(Sigma)) - By Bayes' rule, P(X|M,Z) ∝ L(X;M,Z)P(X|M), - hence L(X;M,Z)P(X|M) is the unnormalized probabilty of - the joint Gaussian distribution. + If q(mu; M, Z) = exp(-error) & k = 1.0 / sqrt((2*pi)^n*det(Sigma)) + thus, q * sqrt((2*pi)^n*det(Sigma)) = q/k = exp(log(q/k)) + = exp(log(q) - log(k)) = exp(-error - log(k)) + = exp(-(error + log(k))), + where error is computed at the corresponding MAP point, gbn.error(mu). - This can be computed by multiplying all the exponentiated errors - of each of the conditionals, which we do below in hybrid case. - */ - /* - To perform model selection, we need: - q(mu; M, Z) * sqrt((2*pi)^n*det(Sigma)) + So we compute (error + log(k)) and exponentiate later + */ - If q(mu; M, Z) = exp(-error) & k = 1.0 / sqrt((2*pi)^n*det(Sigma)) - thus, q * sqrt((2*pi)^n*det(Sigma)) = q/k = exp(log(q/k)) - = exp(log(q) - log(k)) = exp(-error - log(k)) - = exp(-(error + log(k))), - where error is computed at the corresponding MAP point, gbn.error(mu). 
- - So we compute (error + log(k)) and exponentiate later - */ - - std::set discreteKeySet; GaussianBayesNetValTree bnTree = assembleTree(); GaussianBayesNetValTree bn_error = bnTree.apply( @@ -356,6 +341,19 @@ HybridValues HybridBayesNet::optimize() const { [&max_log](const double &x) { return std::exp(x - max_log); }); model_selection = model_selection.normalize(model_selection.sum()); + return model_selection; +} + +/* ************************************************************************* */ +HybridValues HybridBayesNet::optimize() const { + // Collect all the discrete factors to compute MPE + DiscreteFactorGraph discrete_fg; + + // Compute model selection term + AlgebraicDecisionTree model_selection_term = model_selection(); + + // Get the set of all discrete keys involved in model selection + std::set discreteKeySet; for (auto &&conditional : *this) { if (conditional->isDiscrete()) { discrete_fg.push_back(conditional->asDiscrete()); @@ -380,7 +378,7 @@ HybridValues HybridBayesNet::optimize() const { if (discreteKeySet.size() > 0) { discrete_fg.push_back(DecisionTreeFactor( DiscreteKeys(discreteKeySet.begin(), discreteKeySet.end()), - model_selection)); + model_selection_term)); } // Solve for the MPE diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index f0cdbaaf9..8acdd5b1b 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -120,6 +120,19 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { GaussianBayesNetValTree assembleTree() const; + /* + Perform the integration of L(X;M,Z)P(X|M) + which is the model selection term. + + By Bayes' rule, P(X|M,Z) ∝ L(X;M,Z)P(X|M), + hence L(X;M,Z)P(X|M) is the unnormalized probabilty of + the joint Gaussian distribution. + + This can be computed by multiplying all the exponentiated errors + of each of the conditionals. 
+ */ + AlgebraicDecisionTree model_selection() const; + /** * @brief Solve the HybridBayesNet by first computing the MPE of all the * discrete variables and then optimizing the continuous variables based on From 3ba54eb25da317a81327eae0de3a4a382d9bee2d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 3 Jan 2024 16:37:49 -0500 Subject: [PATCH 031/398] improved docstrings --- gtsam/hybrid/GaussianMixture.h | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index a52cd6c82..65c631ece 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -67,13 +67,14 @@ class GTSAM_EXPORT GaussianMixture double logConstant_; ///< log of the normalization constant. /** - * @brief Convert a DecisionTree of factors into a DT of Gaussian FGs. + * @brief Convert a DecisionTree of factors into + * a DecisionTree of Gaussian factor graphs. */ GaussianFactorGraphTree asGaussianFactorGraphTree() const; /** * @brief Convert a DecisionTree of conditionals into - * a DT of Gaussian Bayes nets. + * a DecisionTree of Gaussian Bayes nets. 
*/ GaussianBayesNetTree asGaussianBayesNetTree() const; From bb95cd40d6a4434bf3bacffd155252a24e9266ef Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 3 Jan 2024 16:38:25 -0500 Subject: [PATCH 032/398] remove `using std::dynamic_pointer_cast;` --- gtsam/hybrid/HybridBayesNet.cpp | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 81f4badea..933ef2703 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -26,8 +26,6 @@ static std::mt19937_64 kRandomNumberGenerator(42); namespace gtsam { -using std::dynamic_pointer_cast; - /* ************************************************************************ */ // Throw a runtime exception for method specified in string s, // and conditional f: @@ -253,9 +251,9 @@ GaussianBayesNetValTree HybridBayesNet::assembleTree() const { for (auto &f : factors_) { // TODO(dellaert): just use a virtual method defined in HybridFactor. - if (auto gm = dynamic_pointer_cast(f)) { + if (auto gm = std::dynamic_pointer_cast(f)) { result = gm->add(result); - } else if (auto hc = dynamic_pointer_cast(f)) { + } else if (auto hc = std::dynamic_pointer_cast(f)) { if (auto gm = hc->asMixture()) { result = gm->add(result); } else if (auto g = hc->asGaussian()) { @@ -265,7 +263,7 @@ GaussianBayesNetValTree HybridBayesNet::assembleTree() const { // TODO(dellaert): in C++20, we can use std::visit. continue; } - } else if (dynamic_pointer_cast(f)) { + } else if (std::dynamic_pointer_cast(f)) { // Don't do anything for discrete-only factors // since we want to evaluate continuous values only. 
continue; From 6d50de8c1cc10f7b778373666c46dd1e730d1f5e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 3 Jan 2024 16:40:04 -0500 Subject: [PATCH 033/398] docstring for HybridBayesNet::assembleTree --- gtsam/hybrid/HybridBayesNet.h | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 8acdd5b1b..cedb379c2 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -118,6 +118,14 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { return evaluate(values); } + /** + * @brief Assemble a DecisionTree of (GaussianBayesNet, double) leaves for + * each discrete assignment. + * The included double value is used to make + * constructing the model selection term cleaner and more efficient. + * + * @return GaussianBayesNetValTree + */ GaussianBayesNetValTree assembleTree() const; /* From 0430fee3777482886ec3ca510d79bbc073186f5a Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 7 Jan 2024 15:49:33 -0500 Subject: [PATCH 034/398] improved naming and documentation --- gtsam/hybrid/HybridBayesNet.cpp | 18 +++++++++--------- gtsam/hybrid/HybridBayesNet.h | 9 ++++++--- 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index e9870e6bf..23ee72215 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -281,7 +281,7 @@ GaussianBayesNetValTree HybridBayesNet::assembleTree() const { } /* ************************************************************************* */ -AlgebraicDecisionTree HybridBayesNet::model_selection() const { +AlgebraicDecisionTree HybridBayesNet::modelSelection() const { /* To perform model selection, we need: q(mu; M, Z) * sqrt((2*pi)^n*det(Sigma)) @@ -330,16 +330,16 @@ AlgebraicDecisionTree HybridBayesNet::model_selection() const { }); // Compute model selection term (with help from ADT methods) - AlgebraicDecisionTree model_selection_term = + 
AlgebraicDecisionTree modelSelectionTerm = (errorTree + log_norm_constants) * -1; - double max_log = model_selection_term.max(); - AlgebraicDecisionTree model_selection = DecisionTree( - model_selection_term, + double max_log = modelSelectionTerm.max(); + modelSelectionTerm = DecisionTree( + modelSelectionTerm, [&max_log](const double &x) { return std::exp(x - max_log); }); - model_selection = model_selection.normalize(model_selection.sum()); + modelSelectionTerm = modelSelectionTerm.normalize(modelSelectionTerm.sum()); - return model_selection; + return modelSelectionTerm; } /* ************************************************************************* */ @@ -348,7 +348,7 @@ HybridValues HybridBayesNet::optimize() const { DiscreteFactorGraph discrete_fg; // Compute model selection term - AlgebraicDecisionTree model_selection_term = model_selection(); + AlgebraicDecisionTree modelSelectionTerm = modelSelection(); // Get the set of all discrete keys involved in model selection std::set discreteKeySet; @@ -376,7 +376,7 @@ HybridValues HybridBayesNet::optimize() const { if (discreteKeySet.size() > 0) { discrete_fg.push_back(DecisionTreeFactor( DiscreteKeys(discreteKeySet.begin(), discreteKeySet.end()), - model_selection_term)); + modelSelectionTerm)); } // Solve for the MPE diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 757e60aea..9d16a4e14 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -129,8 +129,11 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { GaussianBayesNetValTree assembleTree() const; /* - Perform the integration of L(X;M,Z)P(X|M) - which is the model selection term. + Compute L(M;Z), the likelihood of the discrete model M + given the measurements Z. + This is called the model selection term. + + To do so, we perform the integration of L(M;Z) ∝ L(X;M,Z)P(X|M). 
By Bayes' rule, P(X|M,Z) ∝ L(X;M,Z)P(X|M), hence L(X;M,Z)P(X|M) is the unnormalized probabilty of @@ -139,7 +142,7 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { This can be computed by multiplying all the exponentiated errors of each of the conditionals. */ - AlgebraicDecisionTree model_selection() const; + AlgebraicDecisionTree modelSelection() const; /** * @brief Solve the HybridBayesNet by first computing the MPE of all the From afcb93390ad05a93a8d9966e16184b935d76f4a8 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 7 Jan 2024 15:54:40 -0500 Subject: [PATCH 035/398] document return type --- gtsam/hybrid/HybridBayesNet.h | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 9d16a4e14..fe9010b1f 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -141,6 +141,8 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { This can be computed by multiplying all the exponentiated errors of each of the conditionals. + + Return a tree where each leaf value is L(M_i;Z). */ AlgebraicDecisionTree modelSelection() const; From 5cdbacfd4408db583c1726dd301b64fe60a659ca Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 7 Jan 2024 20:23:58 -0500 Subject: [PATCH 036/398] test and fix for issue 301 --- gtsam/nonlinear/ISAM2.cpp | 15 +++-- tests/testDoglegOptimizer.cpp | 122 ++++++++++++++++++++++++++++++++-- 2 files changed, 126 insertions(+), 11 deletions(-) diff --git a/gtsam/nonlinear/ISAM2.cpp b/gtsam/nonlinear/ISAM2.cpp index 0b5449b58..47857a2a2 100644 --- a/gtsam/nonlinear/ISAM2.cpp +++ b/gtsam/nonlinear/ISAM2.cpp @@ -424,6 +424,11 @@ ISAM2Result ISAM2::update(const NonlinearFactorGraph& newFactors, ISAM2Result result(params_.enableDetailedResults); UpdateImpl update(params_, updateParams); + // Initialize any new variables \Theta_{new} and add + // \Theta:=\Theta\cup\Theta_{new}. + // Needed before delta update if using Dogleg optimizer. 
+ addVariables(newTheta, result.details()); + // Update delta if we need it to check relinearization later if (update.relinarizationNeeded(update_count_)) updateDelta(updateParams.forceFullSolve); @@ -435,9 +440,7 @@ ISAM2Result ISAM2::update(const NonlinearFactorGraph& newFactors, update.computeUnusedKeys(newFactors, variableIndex_, result.keysWithRemovedFactors, &result.unusedKeys); - // 2. Initialize any new variables \Theta_{new} and add - // \Theta:=\Theta\cup\Theta_{new}. - addVariables(newTheta, result.details()); + // 2. Compute new error to check for relinearization if (params_.evaluateNonlinearError) update.error(nonlinearFactors_, calculateEstimate(), &result.errorBefore); @@ -731,6 +734,7 @@ void ISAM2::updateDelta(bool forceFullSolve) const { effectiveWildfireThreshold, &delta_); deltaReplacedMask_.clear(); gttoc(Wildfire_update); + } else if (std::holds_alternative(params_.optimizationParams)) { // If using Dogleg, do a Dogleg step const ISAM2DoglegParams& doglegParams = @@ -769,9 +773,8 @@ void ISAM2::updateDelta(bool forceFullSolve) const { gttic(Copy_dx_d); // Update Delta and linear step doglegDelta_ = doglegResult.delta; - delta_ = - doglegResult - .dx_d; // Copy the VectorValues containing with the linear solution + // Copy the VectorValues containing with the linear solution + delta_ = doglegResult.dx_d; gttoc(Copy_dx_d); } else { throw std::runtime_error("iSAM2: unknown ISAM2Params type"); diff --git a/tests/testDoglegOptimizer.cpp b/tests/testDoglegOptimizer.cpp index e64e5ab7a..d7de63d3d 100644 --- a/tests/testDoglegOptimizer.cpp +++ b/tests/testDoglegOptimizer.cpp @@ -24,10 +24,9 @@ #include #include #include -#include -#include -#include -#include +#include +#include +#include "examples/SFMdata.h" #include @@ -36,7 +35,6 @@ using namespace gtsam; // Convenience for named keys using symbol_shorthand::X; -using symbol_shorthand::L; /* ************************************************************************* */ TEST(DoglegOptimizer, 
ComputeBlend) { @@ -185,6 +183,120 @@ TEST(DoglegOptimizer, Constraint) { #endif } +/* ************************************************************************* */ +TEST(DogLegOptimizer, VariableUpdate) { + // Make the typename short so it looks much cleaner + typedef SmartProjectionPoseFactor SmartFactor; + + // create a typedef to the camera type + typedef PinholePose Camera; + // Define the camera calibration parameters + Cal3_S2::shared_ptr K(new Cal3_S2(50.0, 50.0, 0.0, 50.0, 50.0)); + + // Define the camera observation noise model + noiseModel::Isotropic::shared_ptr measurementNoise = + noiseModel::Isotropic::Sigma(2, 1.0); // one pixel in u and v + + // Create the set of ground-truth landmarks and poses + vector points = createPoints(); + vector poses = createPoses(); + + // Create a factor graph + NonlinearFactorGraph graph; + + ISAM2DoglegParams doglegparams = ISAM2DoglegParams(); + doglegparams.verbose = false; + ISAM2Params isam2_params; + isam2_params.evaluateNonlinearError = true; + isam2_params.relinearizeThreshold = 0.0; + isam2_params.enableRelinearization = true; + isam2_params.optimizationParams = doglegparams; + isam2_params.relinearizeSkip = 1; + ISAM2 isam2(isam2_params); + + // Simulated measurements from each camera pose, adding them to the factor + // graph + unordered_map smart_factors; + for (size_t j = 0; j < points.size(); ++j) { + // every landmark represent a single landmark, we use shared pointer to init + // the factor, and then insert measurements. + SmartFactor::shared_ptr smartfactor(new SmartFactor(measurementNoise, K)); + + for (size_t i = 0; i < poses.size(); ++i) { + // generate the 2D measurement + Camera camera(poses[i], K); + Point2 measurement = camera.project(points[j]); + + // call add() function to add measurement into a single factor, here we + // need to add: + // 1. the 2D measurement + // 2. the corresponding camera's key + // 3. camera noise model + // 4. 
camera calibration + + // add only first 3 measurements and update the later measurements + // incrementally + if (i < 3) smartfactor->add(measurement, i); + } + + // insert the smart factor in the graph + smart_factors[j] = smartfactor; + graph.push_back(smartfactor); + } + + // Add a prior on pose x0. This indirectly specifies where the origin is. + // 30cm std on x,y,z 0.1 rad on roll,pitch,yaw + noiseModel::Diagonal::shared_ptr noise = noiseModel::Diagonal::Sigmas( + (Vector(6) << Vector3::Constant(0.3), Vector3::Constant(0.1)).finished()); + graph.emplace_shared >(0, poses[0], noise); + + // Because the structure-from-motion problem has a scale ambiguity, the + // problem is still under-constrained. Here we add a prior on the second pose + // x1, so this will fix the scale by indicating the distance between x0 and + // x1. Because these two are fixed, the rest of the poses will be also be + // fixed. + graph.emplace_shared >(1, poses[1], + noise); // add directly to graph + + // Create the initial estimate to the solution + // Intentionally initialize the variables off from the ground truth + Values initialEstimate; + Pose3 delta(Rot3::Rodrigues(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20)); + for (size_t i = 0; i < 3; ++i) + initialEstimate.insert(i, poses[i].compose(delta)); + // initialEstimate.print("Initial Estimates:\n"); + + // Optimize the graph and print results + isam2.update(graph, initialEstimate); + Values result = isam2.calculateEstimate(); + // result.print("Results:\n"); + + // we add new measurements from this pose + size_t pose_idx = 3; + + // Now update existing smart factors with new observations + for (size_t j = 0; j < points.size(); ++j) { + SmartFactor::shared_ptr smartfactor = smart_factors[j]; + + // add the 4th measurement + Camera camera(poses[pose_idx], K); + Point2 measurement = camera.project(points[j]); + smartfactor->add(measurement, pose_idx); + } + + graph.resize(0); + initialEstimate.clear(); + + // update initial estimate 
for the new pose + initialEstimate.insert(pose_idx, poses[pose_idx].compose(delta)); + + // this should break the system + isam2.update(graph, initialEstimate); + result = isam2.calculateEstimate(); + EXPECT(std::find(result.keys().begin(), result.keys().end(), pose_idx) != + result.keys().end()); +} + /* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } /* ************************************************************************* */ From 4c8e4973bf00d6d01379ab0b01bc481a653ea86a Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 8 Jan 2024 08:52:47 -0500 Subject: [PATCH 037/398] update Rot2::fromCosSin docstring --- gtsam/geometry/Rot2.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/geometry/Rot2.h b/gtsam/geometry/Rot2.h index 63b914434..6bb97ff6c 100644 --- a/gtsam/geometry/Rot2.h +++ b/gtsam/geometry/Rot2.h @@ -68,7 +68,7 @@ namespace gtsam { return fromAngle(theta * degree); } - /// Named constructor from cos(theta),sin(theta) pair, will *not* normalize! 
+ /// Named constructor from cos(theta),sin(theta) pair static Rot2 fromCosSin(double c, double s); /** From c5bfd524e0ca8fa8c49fe5ad8183492b748b81ad Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 12 Jan 2024 10:28:02 -0500 Subject: [PATCH 038/398] better printing of GaussianMixtureFactor --- gtsam/hybrid/GaussianMixtureFactor.cpp | 4 +++- gtsam/hybrid/GaussianMixtureFactor.h | 5 ++--- gtsam/hybrid/tests/testGaussianMixtureFactor.cpp | 3 ++- gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp | 2 ++ 4 files changed, 9 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index a3db16d04..4e138acfa 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -54,7 +54,9 @@ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { /* *******************************************************************************/ void GaussianMixtureFactor::print(const std::string &s, const KeyFormatter &formatter) const { - HybridFactor::print(s, formatter); + std::cout << (s.empty() ? 
"" : s + "\n"); + std::cout << "GaussianMixtureFactor" << std::endl; + HybridFactor::print("", formatter); std::cout << "{\n"; if (factors_.empty()) { std::cout << " empty" << std::endl; diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 63ca9e923..913426a98 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -107,9 +107,8 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { bool equals(const HybridFactor &lf, double tol = 1e-9) const override; - void print( - const std::string &s = "GaussianMixtureFactor\n", - const KeyFormatter &formatter = DefaultKeyFormatter) const override; + void print(const std::string &s = "", const KeyFormatter &formatter = + DefaultKeyFormatter) const override; /// @} /// @name Standard API diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 9cc7e6bfd..11b01f074 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -106,7 +106,8 @@ TEST(GaussianMixtureFactor, Printing) { GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); std::string expected = - R"(Hybrid [x1 x2; 1]{ + R"(GaussianMixtureFactor +Hybrid [x1 x2; 1]{ Choice(1) 0 Leaf : A[x1] = [ diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 93081d309..0d4bf27c4 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -510,6 +510,7 @@ factor 0: b = [ -10 ] No noise model factor 1: +GaussianMixtureFactor Hybrid [x0 x1; m0]{ Choice(m0) 0 Leaf : @@ -534,6 +535,7 @@ Hybrid [x0 x1; m0]{ } factor 2: +GaussianMixtureFactor Hybrid [x1 x2; m1]{ Choice(m1) 0 Leaf : From f3e84004bfa94f57024d35c784693902d009d180 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 16 Jan 2024 14:59:47 -0500 Subject: 
[PATCH 039/398] helper functions for computing leaf error and normalization constants used for model selection --- gtsam/hybrid/HybridBayesNet.cpp | 85 ++++++++++++++++++++------------- gtsam/hybrid/HybridBayesNet.h | 37 +++++++++++++- 2 files changed, 87 insertions(+), 35 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 23ee72215..eeacc929b 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -227,24 +227,6 @@ GaussianBayesNet HybridBayesNet::choose( return gbn; } -/* ************************************************************************ */ -static GaussianBayesNetTree addGaussian( - const GaussianBayesNetTree &gfgTree, - const GaussianConditional::shared_ptr &factor) { - // If the decision tree is not initialized, then initialize it. - if (gfgTree.empty()) { - GaussianBayesNet result{factor}; - return GaussianBayesNetTree(result); - } else { - auto add = [&factor](const GaussianBayesNet &graph) { - auto result = graph; - result.push_back(factor); - return result; - }; - return gfgTree.apply(add); - } -} - /* ************************************************************************ */ GaussianBayesNetValTree HybridBayesNet::assembleTree() const { GaussianBayesNetTree result; @@ -320,25 +302,12 @@ AlgebraicDecisionTree HybridBayesNet::modelSelection() const { AlgebraicDecisionTree errorTree = trees.second; // Only compute logNormalizationConstant - AlgebraicDecisionTree log_norm_constants = DecisionTree( - bnTree, [](const std::pair &gbnAndValue) { - GaussianBayesNet gbn = gbnAndValue.first; - if (gbn.size() == 0) { - return 0.0; - } - return gbn.logNormalizationConstant(); - }); + AlgebraicDecisionTree log_norm_constants = + computeLogNormConstants(bnTree); // Compute model selection term (with help from ADT methods) AlgebraicDecisionTree modelSelectionTerm = - (errorTree + log_norm_constants) * -1; - - double max_log = modelSelectionTerm.max(); - modelSelectionTerm = DecisionTree( 
- modelSelectionTerm, - [&max_log](const double &x) { return std::exp(x - max_log); }); - modelSelectionTerm = modelSelectionTerm.normalize(modelSelectionTerm.sum()); - + computeModelSelectionTerm(errorTree, log_norm_constants); return modelSelectionTerm; } @@ -530,4 +499,52 @@ HybridGaussianFactorGraph HybridBayesNet::toFactorGraph( return fg; } +/* ************************************************************************ */ +GaussianBayesNetTree addGaussian( + const GaussianBayesNetTree &gbnTree, + const GaussianConditional::shared_ptr &factor) { + // If the decision tree is not initialized, then initialize it. + if (gbnTree.empty()) { + GaussianBayesNet result{factor}; + return GaussianBayesNetTree(result); + } else { + auto add = [&factor](const GaussianBayesNet &graph) { + auto result = graph; + result.push_back(factor); + return result; + }; + return gbnTree.apply(add); + } +} + +/* ************************************************************************* */ +AlgebraicDecisionTree computeLogNormConstants( + const GaussianBayesNetValTree &bnTree) { + AlgebraicDecisionTree log_norm_constants = DecisionTree( + bnTree, [](const std::pair &gbnAndValue) { + GaussianBayesNet gbn = gbnAndValue.first; + if (gbn.size() == 0) { + return 0.0; + } + return gbn.logNormalizationConstant(); + }); + return log_norm_constants; +} + +/* ************************************************************************* */ +AlgebraicDecisionTree computeModelSelectionTerm( + const AlgebraicDecisionTree &errorTree, + const AlgebraicDecisionTree &log_norm_constants) { + AlgebraicDecisionTree modelSelectionTerm = + (errorTree + log_norm_constants) * -1; + + double max_log = modelSelectionTerm.max(); + modelSelectionTerm = DecisionTree( + modelSelectionTerm, + [&max_log](const double &x) { return std::exp(x - max_log); }); + modelSelectionTerm = modelSelectionTerm.normalize(modelSelectionTerm.sum()); + + return modelSelectionTerm; +} + } // namespace gtsam diff --git 
a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index fe9010b1f..d4fd3db71 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -141,7 +141,7 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { This can be computed by multiplying all the exponentiated errors of each of the conditionals. - + Return a tree where each leaf value is L(M_i;Z). */ AlgebraicDecisionTree modelSelection() const; @@ -280,4 +280,39 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { template <> struct traits : public Testable {}; +/** + * @brief Add a Gaussian conditional to each node of the GaussianBayesNetTree + * + * @param gbnTree + * @param factor + * @return GaussianBayesNetTree + */ +GaussianBayesNetTree addGaussian(const GaussianBayesNetTree &gbnTree, + const GaussianConditional::shared_ptr &factor); + +/** + * @brief Compute the (logarithmic) normalization constant for each Bayes + * network in the tree. + * + * @param bnTree A tree of Bayes networks in each leaf. The tree encodes a + * discrete assignment yielding the Bayes net. + * @return AlgebraicDecisionTree + */ +AlgebraicDecisionTree computeLogNormConstants( + const GaussianBayesNetValTree &bnTree); + +/** + * @brief Compute the model selection term L(M; Z, X) given the error + * and log normalization constants. + * + * Perform normalization to handle underflow issues. 
+ * + * @param errorTree + * @param log_norm_constants + * @return AlgebraicDecisionTree + */ +AlgebraicDecisionTree computeModelSelectionTerm( + const AlgebraicDecisionTree &errorTree, + const AlgebraicDecisionTree &log_norm_constants); + } // namespace gtsam From e7cb7b2dcdc730ffd5a7309e69f03121ac41339b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 16 Jan 2024 15:00:35 -0500 Subject: [PATCH 040/398] add constructor for HybridFactorGraph which takes a container of factors --- gtsam/hybrid/HybridFactorGraph.h | 6 +++++- gtsam/hybrid/HybridGaussianFactorGraph.h | 5 +++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridFactorGraph.h b/gtsam/hybrid/HybridFactorGraph.h index 8b59fd4f9..8881a1d35 100644 --- a/gtsam/hybrid/HybridFactorGraph.h +++ b/gtsam/hybrid/HybridFactorGraph.h @@ -38,7 +38,7 @@ using SharedFactor = std::shared_ptr; class GTSAM_EXPORT HybridFactorGraph : public FactorGraph { public: using Base = FactorGraph; - using This = HybridFactorGraph; ///< this class + using This = HybridFactorGraph; ///< this class using shared_ptr = std::shared_ptr; ///< shared_ptr to This using Values = gtsam::Values; ///< backwards compatibility @@ -59,6 +59,10 @@ class GTSAM_EXPORT HybridFactorGraph : public FactorGraph { template HybridFactorGraph(const FactorGraph& graph) : Base(graph) {} + /** Construct from container of factors (shared_ptr or plain objects) */ + template + explicit HybridFactorGraph(const CONTAINER& factors) : Base(factors) {} + /// @} /// @name Extra methods to inspect discrete/continuous keys. /// @{ diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 1708ff64b..8a86a7335 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -126,6 +126,11 @@ class GTSAM_EXPORT HybridGaussianFactorGraph /// @brief Default constructor. 
HybridGaussianFactorGraph() = default; + /** Construct from container of factors (shared_ptr or plain objects) */ + template + explicit HybridGaussianFactorGraph(const CONTAINER& factors) + : Base(factors) {} + /** * Implicit copy/downcast constructor to override explicit template container * constructor. In BayesTree this is used for: From 4b2a22eaa564e4593cc1c28f12629f35e218598e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 16 Jan 2024 15:01:31 -0500 Subject: [PATCH 041/398] model selection for HybridBayesTree --- gtsam/hybrid/HybridBayesTree.cpp | 105 +++++++++++++++++++++++++++++-- gtsam/hybrid/HybridBayesTree.h | 45 +++++++++++++ 2 files changed, 146 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/HybridBayesTree.cpp b/gtsam/hybrid/HybridBayesTree.cpp index ae8fa0378..cdd30d398 100644 --- a/gtsam/hybrid/HybridBayesTree.cpp +++ b/gtsam/hybrid/HybridBayesTree.cpp @@ -38,19 +38,116 @@ bool HybridBayesTree::equals(const This& other, double tol) const { return Base::equals(other, tol); } +GaussianBayesNetTree& HybridBayesTree::addCliqueToTree( + const sharedClique& clique, GaussianBayesNetTree& result) const { + // Perform bottom-up inclusion + for (sharedClique child : clique->children) { + result = addCliqueToTree(child, result); + } + + auto f = clique->conditional(); + + if (auto hc = std::dynamic_pointer_cast(f)) { + if (auto gm = hc->asMixture()) { + result = gm->add(result); + } else if (auto g = hc->asGaussian()) { + result = addGaussian(result, g); + } else { + // Has to be discrete, which we don't add. 
+ } + } + return result; +} + +/* ************************************************************************ */ +GaussianBayesNetValTree HybridBayesTree::assembleTree() const { + GaussianBayesNetTree result; + for (auto&& root : roots_) { + result = addCliqueToTree(root, result); + } + + GaussianBayesNetValTree resultTree(result, [](const GaussianBayesNet& gbn) { + return std::make_pair(gbn, 0.0); + }); + return resultTree; +} + +/* ************************************************************************* */ +AlgebraicDecisionTree HybridBayesTree::modelSelection() const { + /* + To perform model selection, we need: + q(mu; M, Z) * sqrt((2*pi)^n*det(Sigma)) + + If q(mu; M, Z) = exp(-error) & k = 1.0 / sqrt((2*pi)^n*det(Sigma)) + thus, q * sqrt((2*pi)^n*det(Sigma)) = q/k = exp(log(q/k)) + = exp(log(q) - log(k)) = exp(-error - log(k)) + = exp(-(error + log(k))), + where error is computed at the corresponding MAP point, gbt.error(mu). + + So we compute (error + log(k)) and exponentiate later + */ + + GaussianBayesNetValTree bnTree = assembleTree(); + + GaussianBayesNetValTree bn_error = bnTree.apply( + [this](const Assignment& assignment, + const std::pair& gbnAndValue) { + // Compute the X* of each assignment + VectorValues mu = gbnAndValue.first.optimize(); + + // mu is empty if gbn had nullptrs + if (mu.size() == 0) { + return std::make_pair(gbnAndValue.first, + std::numeric_limits::max()); + } + + // Compute the error for X* and the assignment + double error = + this->error(HybridValues(mu, DiscreteValues(assignment))); + + return std::make_pair(gbnAndValue.first, error); + }); + + auto trees = unzip(bn_error); + AlgebraicDecisionTree errorTree = trees.second; + + // Only compute logNormalizationConstant + AlgebraicDecisionTree log_norm_constants = + computeLogNormConstants(bnTree); + + // Compute model selection term (with help from ADT methods) + AlgebraicDecisionTree modelSelectionTerm = + computeModelSelectionTerm(errorTree, log_norm_constants); + + return 
modelSelectionTerm; +} + /* ************************************************************************* */ HybridValues HybridBayesTree::optimize() const { - DiscreteBayesNet dbn; + DiscreteFactorGraph discrete_fg; DiscreteValues mpe; + // Compute model selection term + AlgebraicDecisionTree modelSelectionTerm = modelSelection(); + auto root = roots_.at(0); // Access the clique and get the underlying hybrid conditional HybridConditional::shared_ptr root_conditional = root->conditional(); - // The root should be discrete only, we compute the MPE + // Get the set of all discrete keys involved in model selection + std::set discreteKeySet; + + // The root should be discrete only, we compute the MPE if (root_conditional->isDiscrete()) { - dbn.push_back(root_conditional->asDiscrete()); - mpe = DiscreteFactorGraph(dbn).optimize(); + discrete_fg.push_back(root_conditional->asDiscrete()); + + // Only add model_selection if we have discrete keys + if (discreteKeySet.size() > 0) { + discrete_fg.push_back(DecisionTreeFactor( + DiscreteKeys(discreteKeySet.begin(), discreteKeySet.end()), + modelSelectionTerm)); + } + mpe = discrete_fg.optimize(); } else { throw std::runtime_error( "HybridBayesTree root is not discrete-only. Please check elimination " diff --git a/gtsam/hybrid/HybridBayesTree.h b/gtsam/hybrid/HybridBayesTree.h index f91e16cbf..8327b7f31 100644 --- a/gtsam/hybrid/HybridBayesTree.h +++ b/gtsam/hybrid/HybridBayesTree.h @@ -84,6 +84,51 @@ class GTSAM_EXPORT HybridBayesTree : public BayesTree { */ GaussianBayesTree choose(const DiscreteValues& assignment) const; + /** Error for all conditionals. */ + double error(const HybridValues& values) const { + return HybridGaussianFactorGraph(*this).error(values); + } + + /** + * @brief Helper function to add a clique of hybrid conditionals to the passed + * in GaussianBayesNetTree. Operates recursively on the clique in a bottom-up + * fashion, adding the children first. 
+ * + * @param clique The + * @param result + * @return GaussianBayesNetTree& + */ + GaussianBayesNetTree& addCliqueToTree(const sharedClique& clique, + GaussianBayesNetTree& result) const; + + /** + * @brief Assemble a DecisionTree of (GaussianBayesTree, double) leaves for + * each discrete assignment. + * The included double value is used to make + * constructing the model selection term cleaner and more efficient. + * + * @return GaussianBayesNetValTree + */ + GaussianBayesNetValTree assembleTree() const; + + /* + Compute L(M;Z), the likelihood of the discrete model M + given the measurements Z. + This is called the model selection term. + + To do so, we perform the integration of L(M;Z) ∝ L(X;M,Z)P(X|M). + + By Bayes' rule, P(X|M,Z) ∝ L(X;M,Z)P(X|M), + hence L(X;M,Z)P(X|M) is the unnormalized probabilty of + the joint Gaussian distribution. + + This can be computed by multiplying all the exponentiated errors + of each of the conditionals. + + Return a tree where each leaf value is L(M_i;Z). 
+ */ + AlgebraicDecisionTree modelSelection() const; + /** * @brief Optimize the hybrid Bayes tree by computing the MPE for the current * set of discrete variables and using it to compute the best continuous From 6d7dc57599b21e9dc6f3989324df67790d9982a1 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 17 Feb 2024 23:10:37 -0500 Subject: [PATCH 042/398] additional clarifying comments --- gtsam/hybrid/HybridBayesNet.cpp | 2 +- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index eeacc929b..d343decc8 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -291,7 +291,7 @@ AlgebraicDecisionTree HybridBayesNet::modelSelection() const { std::numeric_limits::max()); } - // Compute the error for X* and the assignment + // Compute the error at the MLE point X* for the current assignment double error = this->error(HybridValues(mu, DiscreteValues(assignment))); diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 42402076e..be94f42ee 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -295,8 +295,9 @@ static std::shared_ptr createDiscreteFactor( if (!factor) return 1.0; // TODO(dellaert): not loving this. 
// Logspace version of: - // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); - return -factor->error(kEmpty) - conditional->logNormalizationConstant(); + // exp(-factor->error(kEmpty)) * conditional->normalizationConstant(); + // We take negative of the logNormalizationConstant (1/k) to get k + return -factor->error(kEmpty) + (-conditional->logNormalizationConstant()); }; AlgebraicDecisionTree logProbabilities( @@ -324,6 +325,7 @@ static std::shared_ptr createGaussianMixtureFactor( if (factor) { auto hf = std::dynamic_pointer_cast(factor); if (!hf) throw std::runtime_error("Expected HessianFactor!"); + // Add 2.0 term since the constant term will be premultiplied by 0.5 hf->constantTerm() += 2.0 * conditional->logNormalizationConstant(); } return factor; From f12a24e00c666f4f275cf9e942a37f27761bdf6c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 19 Feb 2024 11:33:13 -0500 Subject: [PATCH 043/398] fix comment --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index be94f42ee..ea8bd0b05 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -296,7 +296,7 @@ static std::shared_ptr createDiscreteFactor( // Logspace version of: // exp(-factor->error(kEmpty)) * conditional->normalizationConstant(); - // We take negative of the logNormalizationConstant (1/k) to get k + // We take negative of the logNormalizationConstant `log(1/k)` to get `log(k)`. 
return -factor->error(kEmpty) + (-conditional->logNormalizationConstant()); }; From d19ebe0fdfbaadc61d52aaea40dfdbf0ce6ecae2 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 19 Feb 2024 19:14:59 -0500 Subject: [PATCH 044/398] fix issue with Boost collisions --- gtsam/base/std_optional_serialization.h | 3 +++ 1 file changed, 3 insertions(+) diff --git a/gtsam/base/std_optional_serialization.h b/gtsam/base/std_optional_serialization.h index 5c250eab4..ac0c16c87 100644 --- a/gtsam/base/std_optional_serialization.h +++ b/gtsam/base/std_optional_serialization.h @@ -56,6 +56,8 @@ namespace std { template<> struct is_trivially_move_constructible& t, const unsigned int version) { } // namespace serialization } // namespace boost #endif +#endif From 8372d8490cc4161c09138c03d973a272af3e5fab Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Feb 2024 14:11:47 -0500 Subject: [PATCH 045/398] better printing --- gtsam/hybrid/GaussianMixture.cpp | 3 +-- gtsam/linear/GaussianConditional.cpp | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 7f42e6986..a6f816706 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -173,9 +173,8 @@ void GaussianMixture::print(const std::string &s, for (auto &dk : discreteKeys()) { std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), "; } + std::cout << "\n logNormalizationConstant: " << logConstant_ << std::endl; std::cout << "\n"; - std::cout << " logNormalizationConstant: " << logConstant_ << "\n" - << std::endl; conditionals_.print( "", [&](Key k) { return formatter(k); }, [&](const GaussianConditional::shared_ptr &gf) -> std::string { diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index f986eed02..585bbdb33 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -121,7 +121,7 @@ namespace gtsam { const auto 
mean = solve({}); // solve for mean. mean.print(" mean", formatter); } - cout << " logNormalizationConstant: " << logNormalizationConstant() << std::endl; + cout << " logNormalizationConstant: " << logNormalizationConstant() << endl; if (model_) model_->print(" Noise model: "); else From f62805f8b38b5bfc24986c535310cbc34d7a241d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 23 Feb 2024 12:49:51 -0500 Subject: [PATCH 046/398] add method to select underlying continuous Gaussian graph given discrete assignment --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 24 ++++++++- gtsam/hybrid/HybridGaussianFactorGraph.h | 4 ++ .../tests/testHybridGaussianFactorGraph.cpp | 52 +++++++++++++++++++ 3 files changed, 79 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index ea8bd0b05..32cdddec6 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -296,7 +296,8 @@ static std::shared_ptr createDiscreteFactor( // Logspace version of: // exp(-factor->error(kEmpty)) * conditional->normalizationConstant(); - // We take negative of the logNormalizationConstant `log(1/k)` to get `log(k)`. + // We take negative of the logNormalizationConstant `log(1/k)` + // to get `log(k)`. 
return -factor->error(kEmpty) + (-conditional->logNormalizationConstant()); }; @@ -326,6 +327,7 @@ static std::shared_ptr createGaussianMixtureFactor( auto hf = std::dynamic_pointer_cast(factor); if (!hf) throw std::runtime_error("Expected HessianFactor!"); // Add 2.0 term since the constant term will be premultiplied by 0.5 + // as per the Hessian definition hf->constantTerm() += 2.0 * conditional->logNormalizationConstant(); } return factor; @@ -563,4 +565,24 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::probPrime( return prob_tree; } +/* ************************************************************************ */ +GaussianFactorGraph HybridGaussianFactorGraph::operator()( + const DiscreteValues &assignment) const { + GaussianFactorGraph gfg; + for (auto &&f : *this) { + if (auto gf = std::dynamic_pointer_cast(f)) { + gfg.push_back(gf); + } else if (auto gc = std::dynamic_pointer_cast(f)) { + gfg.push_back(gf); + } else if (auto gmf = std::dynamic_pointer_cast(f)) { + gfg.push_back((*gmf)(assignment)); + } else if (auto gm = dynamic_pointer_cast(f)) { + gfg.push_back((*gm)(assignment)); + } else { + continue; + } + } + return gfg; +} + } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 8a86a7335..415464ecd 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -210,6 +210,10 @@ class GTSAM_EXPORT HybridGaussianFactorGraph GaussianFactorGraphTree assembleGraphTree() const; /// @} + + /// Get the GaussianFactorGraph at a given discrete assignment. 
+ GaussianFactorGraph operator()(const DiscreteValues& assignment) const; + }; } // namespace gtsam diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 5be2f2742..b97dcef72 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -490,6 +490,58 @@ TEST(HybridGaussianFactorGraph, SwitchingTwoVar) { } } +/* ****************************************************************************/ +// Select a particular continuous factor graph given a discrete assignment +TEST(HybridGaussianFactorGraph, DiscreteSelection) { + Switching s(3); + + HybridGaussianFactorGraph graph = s.linearizedFactorGraph; + + DiscreteValues dv00{{M(0), 0}, {M(1), 0}}; + GaussianFactorGraph continuous_00 = graph(dv00); + GaussianFactorGraph expected_00; + expected_00.push_back(JacobianFactor(X(0), I_1x1 * 10, Vector1(-10))); + expected_00.push_back(JacobianFactor(X(0), -I_1x1, X(1), I_1x1, Vector1(-1))); + expected_00.push_back(JacobianFactor(X(1), -I_1x1, X(2), I_1x1, Vector1(-1))); + expected_00.push_back(JacobianFactor(X(1), I_1x1 * 10, Vector1(-10))); + expected_00.push_back(JacobianFactor(X(2), I_1x1 * 10, Vector1(-10))); + + EXPECT(assert_equal(expected_00, continuous_00)); + + DiscreteValues dv01{{M(0), 0}, {M(1), 1}}; + GaussianFactorGraph continuous_01 = graph(dv01); + GaussianFactorGraph expected_01; + expected_01.push_back(JacobianFactor(X(0), I_1x1 * 10, Vector1(-10))); + expected_01.push_back(JacobianFactor(X(0), -I_1x1, X(1), I_1x1, Vector1(-1))); + expected_01.push_back(JacobianFactor(X(1), -I_1x1, X(2), I_1x1, Vector1(-0))); + expected_01.push_back(JacobianFactor(X(1), I_1x1 * 10, Vector1(-10))); + expected_01.push_back(JacobianFactor(X(2), I_1x1 * 10, Vector1(-10))); + + EXPECT(assert_equal(expected_01, continuous_01)); + + DiscreteValues dv10{{M(0), 1}, {M(1), 0}}; + GaussianFactorGraph continuous_10 = graph(dv10); + GaussianFactorGraph 
expected_10; + expected_10.push_back(JacobianFactor(X(0), I_1x1 * 10, Vector1(-10))); + expected_10.push_back(JacobianFactor(X(0), -I_1x1, X(1), I_1x1, Vector1(-0))); + expected_10.push_back(JacobianFactor(X(1), -I_1x1, X(2), I_1x1, Vector1(-1))); + expected_10.push_back(JacobianFactor(X(1), I_1x1 * 10, Vector1(-10))); + expected_10.push_back(JacobianFactor(X(2), I_1x1 * 10, Vector1(-10))); + + EXPECT(assert_equal(expected_10, continuous_10)); + + DiscreteValues dv11{{M(0), 1}, {M(1), 1}}; + GaussianFactorGraph continuous_11 = graph(dv11); + GaussianFactorGraph expected_11; + expected_11.push_back(JacobianFactor(X(0), I_1x1 * 10, Vector1(-10))); + expected_11.push_back(JacobianFactor(X(0), -I_1x1, X(1), I_1x1, Vector1(-0))); + expected_11.push_back(JacobianFactor(X(1), -I_1x1, X(2), I_1x1, Vector1(-0))); + expected_11.push_back(JacobianFactor(X(1), I_1x1 * 10, Vector1(-10))); + expected_11.push_back(JacobianFactor(X(2), I_1x1 * 10, Vector1(-10))); + + EXPECT(assert_equal(expected_11, continuous_11)); +} + /* ************************************************************************* */ TEST(HybridGaussianFactorGraph, optimize) { HybridGaussianFactorGraph hfg; From 6e8e2579da7e48557ec388aa4905bac2a4470196 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 6 Mar 2024 16:01:30 -0500 Subject: [PATCH 047/398] update model selection code and docs to match the math --- gtsam/hybrid/HybridBayesNet.cpp | 41 +++++++++----------------------- gtsam/hybrid/HybridBayesNet.h | 41 +++++++++----------------------- gtsam/hybrid/HybridBayesTree.cpp | 14 ++++++----- 3 files changed, 30 insertions(+), 66 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index d343decc8..4c41e952b 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -265,16 +265,10 @@ GaussianBayesNetValTree HybridBayesNet::assembleTree() const { /* ************************************************************************* */ 
AlgebraicDecisionTree HybridBayesNet::modelSelection() const { /* - To perform model selection, we need: - q(mu; M, Z) * sqrt((2*pi)^n*det(Sigma)) - - If q(mu; M, Z) = exp(-error) & k = 1.0 / sqrt((2*pi)^n*det(Sigma)) - thus, q * sqrt((2*pi)^n*det(Sigma)) = q/k = exp(log(q/k)) - = exp(log(q) - log(k)) = exp(-error - log(k)) - = exp(-(error + log(k))), + To perform model selection, we need: q(mu; M, Z) = exp(-error) where error is computed at the corresponding MAP point, gbn.error(mu). - So we compute (error + log(k)) and exponentiate later + So we compute (-error) and exponentiate later */ GaussianBayesNetValTree bnTree = assembleTree(); @@ -301,13 +295,16 @@ AlgebraicDecisionTree HybridBayesNet::modelSelection() const { auto trees = unzip(bn_error); AlgebraicDecisionTree errorTree = trees.second; - // Only compute logNormalizationConstant - AlgebraicDecisionTree log_norm_constants = - computeLogNormConstants(bnTree); - // Compute model selection term (with help from ADT methods) - AlgebraicDecisionTree modelSelectionTerm = - computeModelSelectionTerm(errorTree, log_norm_constants); + AlgebraicDecisionTree modelSelectionTerm = errorTree * -1; + + // Exponentiate using our scheme + double max_log = modelSelectionTerm.max(); + modelSelectionTerm = DecisionTree( + modelSelectionTerm, + [&max_log](const double &x) { return std::exp(x - max_log); }); + modelSelectionTerm = modelSelectionTerm.normalize(modelSelectionTerm.sum()); + return modelSelectionTerm; } @@ -531,20 +528,4 @@ AlgebraicDecisionTree computeLogNormConstants( return log_norm_constants; } -/* ************************************************************************* */ -AlgebraicDecisionTree computeModelSelectionTerm( - const AlgebraicDecisionTree &errorTree, - const AlgebraicDecisionTree &log_norm_constants) { - AlgebraicDecisionTree modelSelectionTerm = - (errorTree + log_norm_constants) * -1; - - double max_log = modelSelectionTerm.max(); - modelSelectionTerm = DecisionTree( - modelSelectionTerm, - 
[&max_log](const double &x) { return std::exp(x - max_log); }); - modelSelectionTerm = modelSelectionTerm.normalize(modelSelectionTerm.sum()); - - return modelSelectionTerm; -} - } // namespace gtsam diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index d4fd3db71..c678c418c 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -128,22 +128,17 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { */ GaussianBayesNetValTree assembleTree() const; - /* - Compute L(M;Z), the likelihood of the discrete model M - given the measurements Z. - This is called the model selection term. - - To do so, we perform the integration of L(M;Z) ∝ L(X;M,Z)P(X|M). - - By Bayes' rule, P(X|M,Z) ∝ L(X;M,Z)P(X|M), - hence L(X;M,Z)P(X|M) is the unnormalized probabilty of - the joint Gaussian distribution. - - This can be computed by multiplying all the exponentiated errors - of each of the conditionals. - - Return a tree where each leaf value is L(M_i;Z). - */ + /** + * @brief Compute the model selection term q(μ_X; M, Z) + * given the error for each discrete assignment. + * + * The q(μ) terms are obtained as a result of elimination + * as part of the separator factor. + * + * Perform normalization to handle underflow issues. + * + * @return AlgebraicDecisionTree + */ AlgebraicDecisionTree modelSelection() const; /** @@ -301,18 +296,4 @@ GaussianBayesNetTree addGaussian(const GaussianBayesNetTree &gbnTree, AlgebraicDecisionTree computeLogNormConstants( const GaussianBayesNetValTree &bnTree); -/** - * @brief Compute the model selection term L(M; Z, X) given the error - * and log normalization constants. - * - * Perform normalization to handle underflow issues. 
- * - * @param errorTree - * @param log_norm_constants - * @return AlgebraicDecisionTree - */ -AlgebraicDecisionTree computeModelSelectionTerm( - const AlgebraicDecisionTree &errorTree, - const AlgebraicDecisionTree &log_norm_constants); - } // namespace gtsam diff --git a/gtsam/hybrid/HybridBayesTree.cpp b/gtsam/hybrid/HybridBayesTree.cpp index cdd30d398..394c88928 100644 --- a/gtsam/hybrid/HybridBayesTree.cpp +++ b/gtsam/hybrid/HybridBayesTree.cpp @@ -111,13 +111,15 @@ AlgebraicDecisionTree HybridBayesTree::modelSelection() const { auto trees = unzip(bn_error); AlgebraicDecisionTree errorTree = trees.second; - // Only compute logNormalizationConstant - AlgebraicDecisionTree log_norm_constants = - computeLogNormConstants(bnTree); - // Compute model selection term (with help from ADT methods) - AlgebraicDecisionTree modelSelectionTerm = - computeModelSelectionTerm(errorTree, log_norm_constants); + AlgebraicDecisionTree modelSelectionTerm = errorTree * -1; + + // Exponentiate using our scheme + double max_log = modelSelectionTerm.max(); + modelSelectionTerm = DecisionTree( + modelSelectionTerm, + [&max_log](const double& x) { return std::exp(x - max_log); }); + modelSelectionTerm = modelSelectionTerm.normalize(modelSelectionTerm.sum()); return modelSelectionTerm; } From f69891895a1338004e330796a414e3bc4e873441 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 6 Mar 2024 16:12:08 -0500 Subject: [PATCH 048/398] additional test based on Frank's colab notebook --- gtsam/hybrid/tests/testMixtureFactor.cpp | 76 ++++++++++++++++++++++++ 1 file changed, 76 insertions(+) diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index 006a3b026..dd2108d2f 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -229,6 +229,82 @@ TEST(MixtureFactor, DifferentCovariances) { EXPECT(assert_equal(expected_values, actual_values)); } +/* 
************************************************************************* */ +// Test components with differing means and covariances +TEST(MixtureFactor, DifferentMeansAndCovariances) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 0.0, x2 = 7.0; + values.insert(X(1), x1); + + double between = 0.0; + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + auto f0 = + std::make_shared>(X(1), X(2), between, model0); + auto f1 = + std::make_shared>(X(1), X(2), between, model1); + std::vector factors{f0, f1}; + + // Create via toFactorGraph + using symbol_shorthand::Z; + Matrix H0_1, H0_2, H1_1, H1_2; + Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); + std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H0_1 /*Sp1*/}, + {X(2), H0_2 /*Tp2*/}}; + + Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); + std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H1_1 /*Sp1*/}, + {X(2), H1_2 /*Tp2*/}}; + auto gm = new gtsam::GaussianMixture( + {Z(1)}, {X(1), X(2)}, {m1}, + {std::make_shared(terms0, 1, -d0, model0), + std::make_shared(terms1, 1, -d1, model1)}); + gtsam::HybridBayesNet bn; + bn.emplace_back(gm); + + gtsam::VectorValues measurements; + measurements.insert(Z(1), gtsam::Z_1x1); + // Create FG with single GaussianMixtureFactor + HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); + + // Linearized prior factor on X1 + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + mixture_fg.push_back(prior); + + // bn.print("BayesNet:"); + // mixture_fg.print("\n\n"); + + VectorValues vv{{X(1), x1 * I_1x1}, {X(2), x2 * I_1x1}}; + // std::cout << "FG error for m1=0: " + // << mixture_fg.error(HybridValues(vv, DiscreteValues{{m1.first, 0}})) + // << std::endl; + // std::cout << "FG error for m1=1: " + // << mixture_fg.error(HybridValues(vv, DiscreteValues{{m1.first, 1}})) + // << 
std::endl; + + auto hbn = mixture_fg.eliminateSequential(); + + HybridValues actual_values = hbn->optimize(); + + VectorValues cv; + cv.insert(X(1), Vector1(0.0)); + cv.insert(X(2), Vector1(-7.0)); + DiscreteValues dv; + dv.insert({M(1), 1}); + HybridValues expected_values(cv, dv); + + EXPECT(assert_equal(expected_values, actual_values)); +} + /* ************************************************************************* */ int main() { TestResult tr; From c5d6cd5466c01f0a985406cd377b852724c52606 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 3 Apr 2024 16:13:55 +0530 Subject: [PATCH 049/398] wrap DiscreteConditional which takes a vector of doubles --- gtsam/discrete/discrete.i | 3 +++ 1 file changed, 3 insertions(+) diff --git a/gtsam/discrete/discrete.i b/gtsam/discrete/discrete.i index a1731f8e5..0deeb8033 100644 --- a/gtsam/discrete/discrete.i +++ b/gtsam/discrete/discrete.i @@ -104,6 +104,9 @@ virtual class DiscreteConditional : gtsam::DecisionTreeFactor { DiscreteConditional(const gtsam::DecisionTreeFactor& joint, const gtsam::DecisionTreeFactor& marginal, const gtsam::Ordering& orderedKeys); + DiscreteConditional(const gtsam::DiscreteKey& key, + const gtsam::DiscreteKeys& parents, + const std::vector& table); // Standard interface double logNormalizationConstant() const; From c05d17dedfc4d4f943efeb8396f5fb947dac8f71 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 3 Apr 2024 16:14:19 +0530 Subject: [PATCH 050/398] minor formatting in linear.i --- gtsam/linear/linear.i | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/linear/linear.i b/gtsam/linear/linear.i index 6862646ae..8bd3dcb88 100644 --- a/gtsam/linear/linear.i +++ b/gtsam/linear/linear.i @@ -489,7 +489,7 @@ virtual class GaussianConditional : gtsam::JacobianFactor { GaussianConditional(size_t key, Vector d, Matrix R, size_t name1, Matrix S, size_t name2, Matrix T, const gtsam::noiseModel::Diagonal* sigmas); - GaussianConditional(const vector> terms, + 
GaussianConditional(const std::vector> terms, size_t nrFrontals, Vector d, const gtsam::noiseModel::Diagonal* sigmas); @@ -751,4 +751,4 @@ class KalmanFilter { Vector z, Matrix Q); }; -} \ No newline at end of file +} From bd241f61ecc35560f0f5b958639fcc3515a121b8 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 3 Apr 2024 16:14:46 +0530 Subject: [PATCH 051/398] wrap logProbability and evaluate methods of GaussianMixtureFactor --- gtsam/hybrid/hybrid.i | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index c1d57715e..85c34e575 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -92,7 +92,10 @@ class GaussianMixture : gtsam::HybridFactor { const std::vector& conditionalsList); - gtsam::GaussianMixtureFactor* likelihood(const gtsam::VectorValues &frontals) const; + gtsam::GaussianMixtureFactor* likelihood( + const gtsam::VectorValues& frontals) const; + double logProbability(const gtsam::HybridValues& values) const; + double evaluate(const gtsam::HybridValues& values) const; void print(string s = "GaussianMixture\n", const gtsam::KeyFormatter& keyFormatter = From 8dd2bed6c100b9c576b8fa8b2c7d9aa7bb622e1d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 3 Apr 2024 16:15:21 +0530 Subject: [PATCH 052/398] don't check for 0 continuous keys when printing GaussianMixtureFactor as it is possible to have non-zero discrete keys --- gtsam/hybrid/GaussianMixtureFactor.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 4e138acfa..fd7cec066 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -66,7 +66,7 @@ void GaussianMixtureFactor::print(const std::string &s, [&](const sharedFactor &gf) -> std::string { RedirectCout rd; std::cout << ":\n"; - if (gf && !gf->empty()) { + if (gf) { gf->print("", formatter); return rd.str(); } else { From 
8333c25dab31ad83aaa4d89fc62d140bf2abc004 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 3 Apr 2024 16:15:54 +0530 Subject: [PATCH 053/398] Fix slam module README --- gtsam/slam/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/slam/README.md b/gtsam/slam/README.md index ae5edfdac..018126a66 100644 --- a/gtsam/slam/README.md +++ b/gtsam/slam/README.md @@ -63,6 +63,6 @@ A RegularJacobianFactor that uses some badly documented reduction on the Jacobia A RegularJacobianFactor that eliminates a point using sequential elimination. -### JacobianFactorQR +### JacobianFactorSVD -A RegularJacobianFactor that uses the "Nullspace Trick" by Mourikis et al. See the documentation in the file, which *is* well documented. \ No newline at end of file +A RegularJacobianFactor that uses the "Nullspace Trick" by Mourikis et al. See the documentation in the file, which *is* well documented. From a9cf4a0779b42ed1f5bfe208c474acfae495bcc5 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 2 Jul 2024 20:18:24 -0400 Subject: [PATCH 054/398] fix namespacing --- gtsam/linear/linear.i | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/linear/linear.i b/gtsam/linear/linear.i index 577cc8acf..3a629f349 100644 --- a/gtsam/linear/linear.i +++ b/gtsam/linear/linear.i @@ -510,8 +510,8 @@ virtual class GaussianConditional : gtsam::JacobianFactor { GaussianConditional(size_t key, gtsam::Vector d, gtsam::Matrix R, size_t name1, gtsam::Matrix S, size_t name2, gtsam::Matrix T, const gtsam::noiseModel::Diagonal* sigmas); - GaussianConditional(const vector> terms, - size_t nrFrontals, Vector d, + GaussianConditional(const vector> terms, + size_t nrFrontals, gtsam::Vector d, const gtsam::noiseModel::Diagonal* sigmas); // Constructors with no noise model From 113a7f8e6b0eeae0d9dbf4170093e703fce12427 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 5 Aug 2024 11:58:51 -0400 Subject: [PATCH 055/398] added more comments and compute 
GaussianMixture before tau --- gtsam/hybrid/GaussianMixtureFactor.h | 3 ++- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 17 ++++++++++------- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 913426a98..b1be6ef1d 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -134,7 +134,8 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @return AlgebraicDecisionTree A decision tree with the same keys * as the factors involved, and leaf values as the error. */ - AlgebraicDecisionTree errorTree(const VectorValues &continuousValues) const; + AlgebraicDecisionTree errorTree( + const VectorValues &continuousValues) const; /** * @brief Compute the log-likelihood, including the log-normalizing constant. diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 42402076e..f1b79b123 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -296,7 +296,10 @@ static std::shared_ptr createDiscreteFactor( // Logspace version of: // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); - return -factor->error(kEmpty) - conditional->logNormalizationConstant(); + // We take negative of the logNormalizationConstant `log(1/k)` + // to get `log(k)`. + // factor->print("Discrete Separator"); + return -factor->error(kEmpty) + (-conditional->logNormalizationConstant()); }; AlgebraicDecisionTree logProbabilities( @@ -368,6 +371,12 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // Perform elimination! 
DecisionTree eliminationResults(factorGraphTree, eliminate); + // Create the GaussianMixture from the conditionals + GaussianMixture::Conditionals conditionals( + eliminationResults, [](const Result &pair) { return pair.first; }); + auto gaussianMixture = std::make_shared( + frontalKeys, continuousSeparator, discreteSeparator, conditionals); + // If there are no more continuous parents we create a DiscreteFactor with the // error for each discrete choice. Otherwise, create a GaussianMixtureFactor // on the separator, taking care to correct for conditional constants. @@ -377,12 +386,6 @@ hybridElimination(const HybridGaussianFactorGraph &factors, : createGaussianMixtureFactor(eliminationResults, continuousSeparator, discreteSeparator); - // Create the GaussianMixture from the conditionals - GaussianMixture::Conditionals conditionals( - eliminationResults, [](const Result &pair) { return pair.first; }); - auto gaussianMixture = std::make_shared( - frontalKeys, continuousSeparator, discreteSeparator, conditionals); - return {std::make_shared(gaussianMixture), newFactor}; } From 2430abb4bc71ed1bf4bec4079d62c4bc115c2c5b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 7 Aug 2024 09:22:20 -0400 Subject: [PATCH 056/398] test for different error values in BN from MixtureFactor --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 1 - gtsam/hybrid/tests/testMixtureFactor.cpp | 53 +++++++++++++++++++++- 2 files changed, 51 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index f1b79b123..f05dfd423 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -298,7 +298,6 @@ static std::shared_ptr createDiscreteFactor( // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); // We take negative of the logNormalizationConstant `log(1/k)` // to get `log(k)`. 
- // factor->print("Discrete Separator"); return -factor->error(kEmpty) + (-conditional->logNormalizationConstant()); }; diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index 006a3b026..c03df4d61 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -161,6 +161,35 @@ TEST(MixtureFactor, DifferentMeans) { DiscreteValues{{M(1), 1}, {M(2), 0}}); EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}, {M(2), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 0}, {M(2), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}, {M(2), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}, {M(2), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + } } /* ************************************************************************* */ @@ -217,15 +246,35 @@ TEST(MixtureFactor, DifferentCovariances) { auto hbn = mixture_fg.eliminateSequential(); - HybridValues actual_values = hbn->optimize(); - VectorValues cv; cv.insert(X(1), Vector1(0.0)); cv.insert(X(2), Vector1(0.0)); + + // Check that we get different error values at the MLE point μ. 
+ AlgebraicDecisionTree errorTree = hbn->errorTree(cv); + auto cond0 = hbn->at(0)->asMixture(); + auto cond1 = hbn->at(1)->asMixture(); + auto discrete_cond = hbn->at(2)->asDiscrete(); + + HybridValues hv0(cv, DiscreteValues{{M(1), 0}}); + HybridValues hv1(cv, DiscreteValues{{M(1), 1}}); + AlgebraicDecisionTree expectedErrorTree( + m1, + cond0->error(hv0) // cond0(0)->logNormalizationConstant() + // - cond0(1)->logNormalizationConstant + + cond1->error(hv0) + discrete_cond->error(DiscreteValues{{M(1), 0}}), + cond0->error(hv1) // cond1(0)->logNormalizationConstant() + // - cond1(1)->logNormalizationConstant + + cond1->error(hv1) + + discrete_cond->error(DiscreteValues{{M(1), 0}})); + EXPECT(assert_equal(expectedErrorTree, errorTree)); + DiscreteValues dv; dv.insert({M(1), 1}); HybridValues expected_values(cv, dv); + HybridValues actual_values = hbn->optimize(); + EXPECT(assert_equal(expected_values, actual_values)); } From 6dfd5671b1303785f28717388f76e3578fee45df Mon Sep 17 00:00:00 2001 From: Gary Date: Wed, 7 Aug 2024 15:46:13 -0400 Subject: [PATCH 057/398] Adding test and fix for issue #1596 A Non-active constraint returns a `nullptr`, which needs to be checked for when gathering the keys in `EliminateSymbolic`. 
--- gtsam/symbolic/SymbolicFactor-inst.h | 4 ++- tests/testGaussianISAM2.cpp | 50 ++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+), 1 deletion(-) diff --git a/gtsam/symbolic/SymbolicFactor-inst.h b/gtsam/symbolic/SymbolicFactor-inst.h index db0eb05ca..a5410aad5 100644 --- a/gtsam/symbolic/SymbolicFactor-inst.h +++ b/gtsam/symbolic/SymbolicFactor-inst.h @@ -42,7 +42,9 @@ namespace gtsam // Gather all keys KeySet allKeys; for(const std::shared_ptr& factor: factors) { - allKeys.insert(factor->begin(), factor->end()); + // Non-active factors are nullptr + if (factor) + allKeys.insert(factor->begin(), factor->end()); } // Check keys diff --git a/tests/testGaussianISAM2.cpp b/tests/testGaussianISAM2.cpp index 1ea60dac9..721ed51c0 100644 --- a/tests/testGaussianISAM2.cpp +++ b/tests/testGaussianISAM2.cpp @@ -994,6 +994,56 @@ TEST(ISAM2, calculate_nnz) EXPECT_LONGS_EQUAL(expected, actual); } +class FixActiveFactor : public NoiseModelFactorN { + using Base = NoiseModelFactorN; + bool is_active_; + +public: + FixActiveFactor(const gtsam::Key& key, const bool active) + : Base(nullptr, key), is_active_(active) {} + + virtual bool active(const gtsam::Values &values) const override { + return is_active_; + } + + virtual Vector + evaluateError(const Vector2& x, + Base::OptionalMatrixTypeT H = nullptr) const override { + if (H) { + *H = Vector2::Identity(); + } + return Vector2::Zero(); + } +}; + +TEST(ActiveFactorTesting, Issue1596) { + // Issue1596: When a derived Nonlinear Factor is not active, the linearization returns a nullptr (NonlinearFactor.cpp:156), which + // causes an error when `EliminateSymbolic` is called (SymbolicFactor-inst.h:45) due to not checking if the factor is nullptr. 
+ const gtsam::Key key{Symbol('x', 0)}; + + ISAM2 isam; + Values values; + NonlinearFactorGraph graph; + + // Insert an active factor + values.insert(key, Vector2::Zero()); + graph.emplace_shared(key, true); + + // No problem here + isam.update(graph, values); + + graph = NonlinearFactorGraph(); + + // Inserting a factor that is never active + graph.emplace_shared(key, false); + + // This call throws the error if the pointer is not validated on (SymbolicFactor-inst.h:45) + isam.update(graph); + + // If the bug is fixed, this line is reached. + EXPECT(isam.getFactorsUnsafe().size() == 2); +} + /* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr);} /* ************************************************************************* */ From c22b76506c24da51211b0eb473e92d9df2556233 Mon Sep 17 00:00:00 2001 From: AndreMichelin Date: Mon, 12 Aug 2024 19:03:40 -0700 Subject: [PATCH 058/398] Punctuation/scope --- .../tests/testPreintegratedRotation.cpp | 55 ++++++++++--------- 1 file changed, 28 insertions(+), 27 deletions(-) diff --git a/gtsam/navigation/tests/testPreintegratedRotation.cpp b/gtsam/navigation/tests/testPreintegratedRotation.cpp index 1e27ca1e4..bcd291eb2 100644 --- a/gtsam/navigation/tests/testPreintegratedRotation.cpp +++ b/gtsam/navigation/tests/testPreintegratedRotation.cpp @@ -36,40 +36,33 @@ const Vector3 measuredOmega = trueOmega + bias; const double deltaT = 0.5; } // namespace biased_x_rotation -// Create params where x and y axes are exchanged. 
-static std::shared_ptr paramsWithTransform() { - auto p = std::make_shared(); - p->setBodyPSensor({Rot3::Yaw(M_PI_2), {0, 0, 0}}); - return p; -} - //****************************************************************************** TEST(PreintegratedRotation, integrateGyroMeasurement) { // Example where IMU is identical to body frame, then omega is roll using namespace biased_x_rotation; auto p = std::make_shared(); - PreintegratedRotation pim(p); // Check the value. Matrix3 H_bias; - internal::IncrementalRotation f{measuredOmega, deltaT, p->getBodyPSensor()}; + const internal::IncrementalRotation f{measuredOmega, deltaT, p->getBodyPSensor()}; const Rot3 incrR = f(bias, H_bias); - Rot3 expected = Rot3::Roll(omega * deltaT); - EXPECT(assert_equal(expected, incrR, 1e-9)); + const Rot3 expected = Rot3::Roll(omega * deltaT); + EXPECT(assert_equal(expected, incrR, 1e-9)) // Check the derivative: - EXPECT(assert_equal(numericalDerivative11(f, bias), H_bias)); + EXPECT(assert_equal(numericalDerivative11(f, bias), H_bias)) // Check value of deltaRij() after integration. Matrix3 F; + PreintegratedRotation pim(p); pim.integrateGyroMeasurement(measuredOmega, bias, deltaT, F); - EXPECT(assert_equal(expected, pim.deltaRij(), 1e-9)); + EXPECT(assert_equal(expected, pim.deltaRij(), 1e-9)) // Check that system matrix F is the first derivative of compose: - EXPECT(assert_equal(pim.deltaRij().inverse().AdjointMap(), F)); + EXPECT(assert_equal(pim.deltaRij().inverse().AdjointMap(), F)) // Make sure delRdelBiasOmega is H_bias after integration. - EXPECT(assert_equal(H_bias, pim.delRdelBiasOmega())); + EXPECT(assert_equal(H_bias, pim.delRdelBiasOmega())) // Check if we make a correction to the bias, the value and Jacobian are // correct. 
Note that the bias is subtracted from the measurement, and the @@ -78,8 +71,8 @@ TEST(PreintegratedRotation, integrateGyroMeasurement) { const double delta = 0.05; const Vector3 biasOmegaIncr(delta, 0, 0); Rot3 corrected = pim.biascorrectedDeltaRij(biasOmegaIncr, H); - EQUALITY(Vector3(-deltaT * delta, 0, 0), expected.logmap(corrected)); - EXPECT(assert_equal(Rot3::Roll((omega - delta) * deltaT), corrected, 1e-9)); + EQUALITY(Vector3(-deltaT * delta, 0, 0), expected.logmap(corrected)) + EXPECT(assert_equal(Rot3::Roll((omega - delta) * deltaT), corrected, 1e-9)) // TODO(frank): again the derivative is not the *sane* one! // auto g = [&](const Vector3& increment) { @@ -89,39 +82,47 @@ TEST(PreintegratedRotation, integrateGyroMeasurement) { } //****************************************************************************** + +// Create params where x and y axes are exchanged. +static std::shared_ptr paramsWithTransform() { + auto p = std::make_shared(); + p->setBodyPSensor({Rot3::Yaw(M_PI_2), {0, 0, 0}}); + return p; +} + TEST(PreintegratedRotation, integrateGyroMeasurementWithTransform) { // Example where IMU is rotated, so measured omega indicates pitch. using namespace biased_x_rotation; auto p = paramsWithTransform(); - PreintegratedRotation pim(p); // Check the value. Matrix3 H_bias; - internal::IncrementalRotation f{measuredOmega, deltaT, p->getBodyPSensor()}; - Rot3 expected = Rot3::Pitch(omega * deltaT); - EXPECT(assert_equal(expected, f(bias, H_bias), 1e-9)); + const internal::IncrementalRotation f{measuredOmega, deltaT, p->getBodyPSensor()}; + const Rot3 expected = Rot3::Pitch(omega * deltaT); // Pitch, because of sensor-IMU rotation! + EXPECT(assert_equal(expected, f(bias, H_bias), 1e-9)) // Check the derivative: - EXPECT(assert_equal(numericalDerivative11(f, bias), H_bias)); + EXPECT(assert_equal(numericalDerivative11(f, bias), H_bias)) // Check value of deltaRij() after integration. 
Matrix3 F; + PreintegratedRotation pim(p); pim.integrateGyroMeasurement(measuredOmega, bias, deltaT, F); - EXPECT(assert_equal(expected, pim.deltaRij(), 1e-9)); + EXPECT(assert_equal(expected, pim.deltaRij(), 1e-9)) // Check that system matrix F is the first derivative of compose: - EXPECT(assert_equal(pim.deltaRij().inverse().AdjointMap(), F)); + EXPECT(assert_equal(pim.deltaRij().inverse().AdjointMap(), F)) // Make sure delRdelBiasOmega is H_bias after integration. - EXPECT(assert_equal(H_bias, pim.delRdelBiasOmega())); + EXPECT(assert_equal(H_bias, pim.delRdelBiasOmega())) // Check the bias correction in same way, but will now yield pitch change. Matrix3 H; const double delta = 0.05; const Vector3 biasOmegaIncr(delta, 0, 0); Rot3 corrected = pim.biascorrectedDeltaRij(biasOmegaIncr, H); - EQUALITY(Vector3(0, -deltaT * delta, 0), expected.logmap(corrected)); - EXPECT(assert_equal(Rot3::Pitch((omega - delta) * deltaT), corrected, 1e-9)); + EQUALITY(Vector3(0, -deltaT * delta, 0), expected.logmap(corrected)) + EXPECT(assert_equal(Rot3::Pitch((omega - delta) * deltaT), corrected, 1e-9)) // TODO(frank): again the derivative is not the *sane* one! // auto g = [&](const Vector3& increment) { From c2145bdffccfba6558dd42807e4d3c36075255b1 Mon Sep 17 00:00:00 2001 From: AndreMichelin Date: Mon, 12 Aug 2024 19:55:06 -0700 Subject: [PATCH 059/398] Simplify code as we know the Jacobian = R --- gtsam/navigation/PreintegratedRotation.cpp | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/gtsam/navigation/PreintegratedRotation.cpp b/gtsam/navigation/PreintegratedRotation.cpp index 04e201a34..6c8e510e5 100644 --- a/gtsam/navigation/PreintegratedRotation.cpp +++ b/gtsam/navigation/PreintegratedRotation.cpp @@ -75,13 +75,11 @@ Rot3 IncrementalRotation::operator()( Vector3 correctedOmega = measuredOmega - bias; // Then compensate for sensor-body displacement: we express the quantities - // (originally in the IMU frame) into the body frame. 
If Jacobian is - // requested, the rotation matrix is obtained as `rotate` Jacobian. - Matrix3 body_R_sensor; + // (originally in the IMU frame) into the body frame. + // Note that the rotate Jacobian is just body_P_sensor->rotation().matrix(). if (body_P_sensor) { // rotation rate vector in the body frame - correctedOmega = body_P_sensor->rotation().rotate( - correctedOmega, {}, H_bias ? &body_R_sensor : nullptr); + correctedOmega = body_P_sensor->rotation() * correctedOmega; } // rotation vector describing rotation increment computed from the @@ -90,7 +88,7 @@ Rot3 IncrementalRotation::operator()( Rot3 incrR = Rot3::Expmap(integratedOmega, H_bias); // expensive !! if (H_bias) { *H_bias *= -deltaT; // Correct so accurately reflects bias derivative - if (body_P_sensor) *H_bias *= body_R_sensor; + if (body_P_sensor) *H_bias *= body_P_sensor->rotation().matrix(); } return incrR; } From 37696b274e1ce9be3169ca5959cb5da50139a0d8 Mon Sep 17 00:00:00 2001 From: AndreMichelin Date: Mon, 12 Aug 2024 19:55:20 -0700 Subject: [PATCH 060/398] Adding more tests --- .../tests/testPreintegratedRotation.cpp | 84 ++++++++++++++++--- 1 file changed, 74 insertions(+), 10 deletions(-) diff --git a/gtsam/navigation/tests/testPreintegratedRotation.cpp b/gtsam/navigation/tests/testPreintegratedRotation.cpp index bcd291eb2..d84af031e 100644 --- a/gtsam/navigation/tests/testPreintegratedRotation.cpp +++ b/gtsam/navigation/tests/testPreintegratedRotation.cpp @@ -74,11 +74,18 @@ TEST(PreintegratedRotation, integrateGyroMeasurement) { EQUALITY(Vector3(-deltaT * delta, 0, 0), expected.logmap(corrected)) EXPECT(assert_equal(Rot3::Roll((omega - delta) * deltaT), corrected, 1e-9)) - // TODO(frank): again the derivative is not the *sane* one! 
- // auto g = [&](const Vector3& increment) { - // return pim.biascorrectedDeltaRij(increment, {}); - // }; - // EXPECT(assert_equal(numericalDerivative11(g, Z_3x1), H)); + auto g = [&](const Vector3& increment) { + return pim.biascorrectedDeltaRij(increment, {}); + }; + const Matrix3 sane = numericalDerivative11(g, Z_3x1); + EXPECT(assert_equal(sane, pim.delRdelBiasOmega())); + // TODO(frank): the derivative H we compute in biascorrectedDeltaRij is not the *sane* one! + // EXPECT(assert_equal(sane, H)); + + // Let's integrate a second IMU measurement and check the Jacobian update: + pim.integrateGyroMeasurement(measuredOmega, bias, deltaT); + const Matrix3 sane2 = numericalDerivative11(g, Z_3x1); + EXPECT(assert_equal(sane2, pim.delRdelBiasOmega())); } //****************************************************************************** @@ -124,11 +131,68 @@ TEST(PreintegratedRotation, integrateGyroMeasurementWithTransform) { EQUALITY(Vector3(0, -deltaT * delta, 0), expected.logmap(corrected)) EXPECT(assert_equal(Rot3::Pitch((omega - delta) * deltaT), corrected, 1e-9)) - // TODO(frank): again the derivative is not the *sane* one! - // auto g = [&](const Vector3& increment) { - // return pim.biascorrectedDeltaRij(increment, {}); - // }; - // EXPECT(assert_equal(numericalDerivative11(g, Z_3x1), H)); + auto g = [&](const Vector3& increment) { + return pim.biascorrectedDeltaRij(increment, {}); + }; + const Matrix3 sane = numericalDerivative11(g, Z_3x1); + EXPECT(assert_equal(sane, pim.delRdelBiasOmega())); + // TODO(frank): the derivative H we compute in biascorrectedDeltaRij is not the *sane* one! + // EXPECT(assert_equal(sane, H)); + + // Let's integrate a second IMU measurement and check the Jacobian update: + pim.integrateGyroMeasurement(measuredOmega, bias, deltaT); + const Matrix3 sane2 = numericalDerivative11(g, Z_3x1); + EXPECT(assert_equal(sane2, pim.delRdelBiasOmega())); +} + +// Create params we have a non-axis-aligned rotation and even an offset. 
+static std::shared_ptr paramsWithArbitraryTransform() { + auto p = std::make_shared(); + p->setBodyPSensor({Rot3::Expmap({1,2,3}), {4,5,6}}); + return p; +} + +TEST(PreintegratedRotation, integrateGyroMeasurementWithArbitraryTransform) { + // Example with a non-axis-aligned transform and some position. + using namespace biased_x_rotation; + auto p = paramsWithArbitraryTransform(); + + // Check the derivative: + Matrix3 H_bias; + const internal::IncrementalRotation f{measuredOmega, deltaT, p->getBodyPSensor()}; + f(bias, H_bias); + EXPECT(assert_equal(numericalDerivative11(f, bias), H_bias)) + + // Check derivative of deltaRij() after integration. + Matrix3 F; + PreintegratedRotation pim(p); + pim.integrateGyroMeasurement(measuredOmega, bias, deltaT, F); + + // Check that system matrix F is the first derivative of compose: + EXPECT(assert_equal(pim.deltaRij().inverse().AdjointMap(), F)) + + // Make sure delRdelBiasOmega is H_bias after integration. + EXPECT(assert_equal(H_bias, pim.delRdelBiasOmega())) + + // Check the bias correction in same way, but will now yield pitch change. + Matrix3 H; + const double delta = 0.05; + const Vector3 biasOmegaIncr(delta, 0, 0); + Rot3 corrected = pim.biascorrectedDeltaRij(biasOmegaIncr, H); + + auto g = [&](const Vector3& increment) { + return pim.biascorrectedDeltaRij(increment, {}); + }; + const Matrix3 sane = numericalDerivative11(g, Z_3x1); + EXPECT(assert_equal(sane, pim.delRdelBiasOmega())); + // TODO(frank): the derivative H we compute in biascorrectedDeltaRij is not the *sane* one! 
+ // EXPECT(assert_equal(sane, H)); + + // Let's integrate a second IMU measurement and check the Jacobian update: + pim.integrateGyroMeasurement(measuredOmega, bias, deltaT); + const Matrix3 sane2 = numericalDerivative11(g, Z_3x1); + EXPECT(assert_equal(sane2, pim.delRdelBiasOmega())); + } //****************************************************************************** From 1c40b17fac65dc752f31340daf73a434590c11ad Mon Sep 17 00:00:00 2001 From: AndreMichelin Date: Tue, 13 Aug 2024 17:32:14 -0700 Subject: [PATCH 061/398] Some tests on Expmap/expmap chain rule --- gtsam/geometry/tests/testPose3.cpp | 25 +++++++++++++++++++ gtsam/geometry/tests/testRot3.cpp | 40 ++++++++++++++++++++++++++++++ 2 files changed, 65 insertions(+) diff --git a/gtsam/geometry/tests/testPose3.cpp b/gtsam/geometry/tests/testPose3.cpp index 93cf99972..065d43bf9 100644 --- a/gtsam/geometry/tests/testPose3.cpp +++ b/gtsam/geometry/tests/testPose3.cpp @@ -1207,6 +1207,31 @@ TEST(Pose3, Print) { EXPECT(assert_print_equal(expected, pose)); } +/* ************************************************************************* */ +TEST(Pose3, ExpmapChainRule) { + // Muliply with an arbitrary matrix and exponentiate + Matrix6 M; + M << 1, 2, 3, 4, 5, 6, // + 7, 8, 9, 1, 2, 3, // + 4, 5, 6, 7, 8, 9, // + 1, 2, 3, 4, 5, 6, // + 7, 8, 9, 1, 2, 3, // + 4, 5, 6, 7, 8, 9; + auto g = [&](const Vector6& omega) { + return Pose3::Expmap(M*omega); + }; + + // Test the derivatives at zero + const Matrix6 expected = numericalDerivative11(g, Z_6x1); + EXPECT(assert_equal(expected, M)); // Pose3::ExpmapDerivative(Z_6x1) is identity + + // Test the derivatives at another value + const Vector6 delta{0.1, 0.2, 0.3, 0.4, 0.5, 0.6}; + const Matrix6 expected2 = numericalDerivative11(g, delta); + const Matrix6 analytic = Pose3::ExpmapDerivative(M*delta) * M; + EXPECT(assert_equal(expected2, analytic, 1e-5)); // note tolerance +} + /* ************************************************************************* */ int main() { 
TestResult tr; diff --git a/gtsam/geometry/tests/testRot3.cpp b/gtsam/geometry/tests/testRot3.cpp index 1232348f0..9555a2445 100644 --- a/gtsam/geometry/tests/testRot3.cpp +++ b/gtsam/geometry/tests/testRot3.cpp @@ -956,6 +956,46 @@ TEST(Rot3, determinant) { } } +/* ************************************************************************* */ +TEST(Rot3, ExpmapChainRule) { + // Muliply with an arbitrary matrix and exponentiate + Matrix3 M; + M << 1, 2, 3, 4, 5, 6, 7, 8, 9; + auto g = [&](const Vector3& omega) { + return Rot3::Expmap(M*omega); + }; + + // Test the derivatives at zero + const Matrix3 expected = numericalDerivative11(g, Z_3x1); + EXPECT(assert_equal(expected, M)); // SO3::ExpmapDerivative(Z_3x1) is identity + + // Test the derivatives at another value + const Vector3 delta{0.1,0.2,0.3}; + const Matrix3 expected2 = numericalDerivative11(g, delta); + EXPECT(assert_equal(expected2, SO3::ExpmapDerivative(M*delta) * M)); +} + +/* ************************************************************************* */ +TEST(Rot3, expmapChainRule) { + // Muliply an arbitrary rotation with exp(M*x) + // Perhaps counter-intuitively, this has the same derivatives as above + Matrix3 M; + M << 1, 2, 3, 4, 5, 6, 7, 8, 9; + const Rot3 R = Rot3::Expmap({1, 2, 3}); + auto g = [&](const Vector3& omega) { + return R.expmap(M*omega); + }; + + // Test the derivatives at zero + const Matrix3 expected = numericalDerivative11(g, Z_3x1); + EXPECT(assert_equal(expected, M)); + + // Test the derivatives at another value + const Vector3 delta{0.1,0.2,0.3}; + const Matrix3 expected2 = numericalDerivative11(g, delta); + EXPECT(assert_equal(expected2, SO3::ExpmapDerivative(M*delta) * M)); +} + /* ************************************************************************* */ int main() { TestResult tr; From ac96b59469a1360ff4c8c06320b8447b94dd6b49 Mon Sep 17 00:00:00 2001 From: AndreMichelin Date: Wed, 14 Aug 2024 10:50:32 -0700 Subject: [PATCH 062/398] Fix jacobian tests --- 
.../tests/testPreintegratedRotation.cpp | 39 +++++++++---------- 1 file changed, 19 insertions(+), 20 deletions(-) diff --git a/gtsam/navigation/tests/testPreintegratedRotation.cpp b/gtsam/navigation/tests/testPreintegratedRotation.cpp index d84af031e..468701e3c 100644 --- a/gtsam/navigation/tests/testPreintegratedRotation.cpp +++ b/gtsam/navigation/tests/testPreintegratedRotation.cpp @@ -74,18 +74,18 @@ TEST(PreintegratedRotation, integrateGyroMeasurement) { EQUALITY(Vector3(-deltaT * delta, 0, 0), expected.logmap(corrected)) EXPECT(assert_equal(Rot3::Roll((omega - delta) * deltaT), corrected, 1e-9)) + // Check the derivative matches the numerical one auto g = [&](const Vector3& increment) { return pim.biascorrectedDeltaRij(increment, {}); }; - const Matrix3 sane = numericalDerivative11(g, Z_3x1); - EXPECT(assert_equal(sane, pim.delRdelBiasOmega())); - // TODO(frank): the derivative H we compute in biascorrectedDeltaRij is not the *sane* one! - // EXPECT(assert_equal(sane, H)); - + Matrix3 expectedH = numericalDerivative11(g, biasOmegaIncr); + EXPECT(assert_equal(expectedH, H)); + // Let's integrate a second IMU measurement and check the Jacobian update: pim.integrateGyroMeasurement(measuredOmega, bias, deltaT); - const Matrix3 sane2 = numericalDerivative11(g, Z_3x1); - EXPECT(assert_equal(sane2, pim.delRdelBiasOmega())); + expectedH = numericalDerivative11(g, biasOmegaIncr); + corrected = pim.biascorrectedDeltaRij(biasOmegaIncr, H); + EXPECT(assert_equal(expectedH, H)); } //****************************************************************************** @@ -131,18 +131,18 @@ TEST(PreintegratedRotation, integrateGyroMeasurementWithTransform) { EQUALITY(Vector3(0, -deltaT * delta, 0), expected.logmap(corrected)) EXPECT(assert_equal(Rot3::Pitch((omega - delta) * deltaT), corrected, 1e-9)) + // Check the derivative matches the *expectedH* one auto g = [&](const Vector3& increment) { return pim.biascorrectedDeltaRij(increment, {}); }; - const Matrix3 sane = 
numericalDerivative11(g, Z_3x1); - EXPECT(assert_equal(sane, pim.delRdelBiasOmega())); - // TODO(frank): the derivative H we compute in biascorrectedDeltaRij is not the *sane* one! - // EXPECT(assert_equal(sane, H)); + Matrix3 expectedH = numericalDerivative11(g, biasOmegaIncr); + EXPECT(assert_equal(expectedH, H)); // Let's integrate a second IMU measurement and check the Jacobian update: pim.integrateGyroMeasurement(measuredOmega, bias, deltaT); - const Matrix3 sane2 = numericalDerivative11(g, Z_3x1); - EXPECT(assert_equal(sane2, pim.delRdelBiasOmega())); + corrected = pim.biascorrectedDeltaRij(biasOmegaIncr, H); + expectedH = numericalDerivative11(g, biasOmegaIncr); + EXPECT(assert_equal(expectedH, H)); } // Create params we have a non-axis-aligned rotation and even an offset. @@ -180,19 +180,18 @@ TEST(PreintegratedRotation, integrateGyroMeasurementWithArbitraryTransform) { const Vector3 biasOmegaIncr(delta, 0, 0); Rot3 corrected = pim.biascorrectedDeltaRij(biasOmegaIncr, H); + // Check the derivative matches the numerical one auto g = [&](const Vector3& increment) { return pim.biascorrectedDeltaRij(increment, {}); }; - const Matrix3 sane = numericalDerivative11(g, Z_3x1); - EXPECT(assert_equal(sane, pim.delRdelBiasOmega())); - // TODO(frank): the derivative H we compute in biascorrectedDeltaRij is not the *sane* one! 
- // EXPECT(assert_equal(sane, H)); + Matrix3 expectedH = numericalDerivative11(g, biasOmegaIncr); + EXPECT(assert_equal(expectedH, H)); // Let's integrate a second IMU measurement and check the Jacobian update: pim.integrateGyroMeasurement(measuredOmega, bias, deltaT); - const Matrix3 sane2 = numericalDerivative11(g, Z_3x1); - EXPECT(assert_equal(sane2, pim.delRdelBiasOmega())); - + corrected = pim.biascorrectedDeltaRij(biasOmegaIncr, H); + expectedH = numericalDerivative11(g, biasOmegaIncr); + EXPECT(assert_equal(expectedH, H)); } //****************************************************************************** From 6412517d3fe2df44a37bfae8c939aa05b2b9e24c Mon Sep 17 00:00:00 2001 From: AndreMichelin Date: Wed, 14 Aug 2024 17:53:18 -0700 Subject: [PATCH 063/398] Fix and comment another test --- .../tests/testManifoldPreintegration.cpp | 28 +++++++++++-------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/gtsam/navigation/tests/testManifoldPreintegration.cpp b/gtsam/navigation/tests/testManifoldPreintegration.cpp index 4016240cf..82f9876fb 100644 --- a/gtsam/navigation/tests/testManifoldPreintegration.cpp +++ b/gtsam/navigation/tests/testManifoldPreintegration.cpp @@ -118,17 +118,18 @@ TEST(ManifoldPreintegration, CompareWithPreintegratedRotation) { // Integrate a single measurement const double omega = 0.1; const Vector3 trueOmega(omega, 0, 0); - const Vector3 bias(1, 2, 3); - const Vector3 measuredOmega = trueOmega + bias; + const Vector3 biasOmega(1, 2, 3); + const Vector3 measuredOmega = trueOmega + biasOmega; const double deltaT = 0.5; // Check the accumulated rotation. 
Rot3 expected = Rot3::Roll(omega * deltaT); - pim.integrateGyroMeasurement(measuredOmega, bias, deltaT); + const Vector biasOmegaHat = biasOmega; + pim.integrateGyroMeasurement(measuredOmega, biasOmegaHat, deltaT); EXPECT(assert_equal(expected, pim.deltaRij(), 1e-9)); // Now do the same for a ManifoldPreintegration object - imuBias::ConstantBias biasHat {Z_3x1, bias}; + imuBias::ConstantBias biasHat {Z_3x1, biasOmega}; ManifoldPreintegration manifoldPim(testing::Params(), biasHat); manifoldPim.integrateMeasurement(Z_3x1, measuredOmega, deltaT); EXPECT(assert_equal(expected, manifoldPim.deltaRij(), 1e-9)); @@ -136,17 +137,21 @@ TEST(ManifoldPreintegration, CompareWithPreintegratedRotation) { // Check their internal Jacobians are the same: EXPECT(assert_equal(pim.delRdelBiasOmega(), manifoldPim.delRdelBiasOmega())); - // Check PreintegratedRotation::biascorrectedDeltaRij. - Matrix3 H; + // Let's check the derivatives a delta away from the bias hat const double delta = 0.05; const Vector3 biasOmegaIncr(delta, 0, 0); + imuBias::ConstantBias bias_i {Z_3x1, biasOmegaHat + biasOmegaIncr}; + + // Check PreintegratedRotation::biascorrectedDeltaRij. + // Note that biascorrectedDeltaRij expects the biasHat to be subtracted already + Matrix3 H; Rot3 corrected = pim.biascorrectedDeltaRij(biasOmegaIncr, H); EQUALITY(Vector3(-deltaT * delta, 0, 0), expected.logmap(corrected)); const Rot3 expected2 = Rot3::Roll((omega - delta) * deltaT); EXPECT(assert_equal(expected2, corrected, 1e-9)); // Check ManifoldPreintegration::biasCorrectedDelta. 
- imuBias::ConstantBias bias_i {Z_3x1, bias + biasOmegaIncr}; + // Note that, confusingly, biasCorrectedDelta will subtract biasHat inside Matrix96 H2; Vector9 biasCorrected = manifoldPim.biasCorrectedDelta(bias_i, H2); Vector9 expected3; @@ -154,12 +159,11 @@ TEST(ManifoldPreintegration, CompareWithPreintegratedRotation) { EXPECT(assert_equal(expected3, biasCorrected, 1e-9)); // Check that this one is sane: - auto g = [&](const Vector3& increment) { - return manifoldPim.biasCorrectedDelta({Z_3x1, bias + increment}, {}); + auto g = [&](const Vector3& b) { + return manifoldPim.biasCorrectedDelta({Z_3x1, b}, {}); }; - EXPECT(assert_equal(numericalDerivative11(g, Z_3x1), - H2.rightCols<3>(), - 1e-4)); // NOTE(frank): reduced tolerance + EXPECT(assert_equal(numericalDerivative11(g, bias_i.gyroscope()), + H2.rightCols<3>())); } /* ************************************************************************* */ From a3afa5740a4f29cab512f50e6685c157e2d2d0d6 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 15 Aug 2024 17:13:19 -0700 Subject: [PATCH 064/398] Fix c++ warnings --- gtsam/3rdparty/Eigen/Eigen/src/SparseCore/TriangularSolver.h | 2 -- .../Eigen/Eigen/src/SparseLU/SparseLU_heap_relax_snode.h | 5 ----- gtsam/3rdparty/metis/GKlib/pdb.c | 2 +- 3 files changed, 1 insertion(+), 8 deletions(-) diff --git a/gtsam/3rdparty/Eigen/Eigen/src/SparseCore/TriangularSolver.h b/gtsam/3rdparty/Eigen/Eigen/src/SparseCore/TriangularSolver.h index f9c56ba79..6b5fdb3e6 100644 --- a/gtsam/3rdparty/Eigen/Eigen/src/SparseCore/TriangularSolver.h +++ b/gtsam/3rdparty/Eigen/Eigen/src/SparseCore/TriangularSolver.h @@ -270,11 +270,9 @@ struct sparse_solve_triangular_sparse_selector } - Index count = 0; // FIXME compute a reference value to filter zeros for (typename AmbiVector::Iterator it(tempVector/*,1e-12*/); it; ++it) { - ++ count; // std::cerr << "fill " << it.index() << ", " << col << "\n"; // std::cout << it.value() << " "; // FIXME use insertBack diff --git 
a/gtsam/3rdparty/Eigen/Eigen/src/SparseLU/SparseLU_heap_relax_snode.h b/gtsam/3rdparty/Eigen/Eigen/src/SparseLU/SparseLU_heap_relax_snode.h index 6f75d500e..7aecbcad8 100644 --- a/gtsam/3rdparty/Eigen/Eigen/src/SparseLU/SparseLU_heap_relax_snode.h +++ b/gtsam/3rdparty/Eigen/Eigen/src/SparseLU/SparseLU_heap_relax_snode.h @@ -75,8 +75,6 @@ void SparseLUImpl::heap_relax_snode (const Index n, IndexVe // Identify the relaxed supernodes by postorder traversal of the etree Index snode_start; // beginning of a snode StorageIndex k; - Index nsuper_et_post = 0; // Number of relaxed snodes in postordered etree - Index nsuper_et = 0; // Number of relaxed snodes in the original etree StorageIndex l; for (j = 0; j < n; ) { @@ -88,7 +86,6 @@ void SparseLUImpl::heap_relax_snode (const Index n, IndexVe parent = et(j); } // Found a supernode in postordered etree, j is the last column - ++nsuper_et_post; k = StorageIndex(n); for (Index i = snode_start; i <= j; ++i) k = (std::min)(k, inv_post(i)); @@ -97,7 +94,6 @@ void SparseLUImpl::heap_relax_snode (const Index n, IndexVe { // This is also a supernode in the original etree relax_end(k) = l; // Record last column - ++nsuper_et; } else { @@ -107,7 +103,6 @@ void SparseLUImpl::heap_relax_snode (const Index n, IndexVe if (descendants(i) == 0) { relax_end(l) = l; - ++nsuper_et; } } } diff --git a/gtsam/3rdparty/metis/GKlib/pdb.c b/gtsam/3rdparty/metis/GKlib/pdb.c index b4d222653..018846604 100644 --- a/gtsam/3rdparty/metis/GKlib/pdb.c +++ b/gtsam/3rdparty/metis/GKlib/pdb.c @@ -131,7 +131,7 @@ that structure. 
/************************************************************************/ pdbf *gk_readpdbfile(char *fname) { /* {{{ */ int i=0, res=0; - char linetype[6]; + char linetype[7]; int aserial; char aname[5] = " \0"; char altLoc = ' '; From 0f23eedbd063fe982f913392267b02b3e3617d30 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 19 Aug 2024 11:39:02 -0400 Subject: [PATCH 065/398] fix issue in geometry.i file --- gtsam/geometry/geometry.i | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/geometry/geometry.i b/gtsam/geometry/geometry.i index 3d816fc25..095a350dd 100644 --- a/gtsam/geometry/geometry.i +++ b/gtsam/geometry/geometry.i @@ -856,7 +856,7 @@ class Cal3_S2Stereo { gtsam::Matrix K() const; gtsam::Point2 principalPoint() const; double baseline() const; - Vector6 vector() const; + gtsam::Vector6 vector() const; gtsam::Matrix inverse() const; }; From 3c722acedc220b4daf145104f494c3d3f9d6e172 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 07:51:00 -0400 Subject: [PATCH 066/398] update GaussianMixtureFactor to record normalizers, and add unit tests --- gtsam/hybrid/GaussianMixtureFactor.cpp | 79 ++++++++- gtsam/hybrid/GaussianMixtureFactor.h | 13 +- .../tests/testGaussianMixtureFactor.cpp | 167 +++++++++++++++++- 3 files changed, 250 insertions(+), 9 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 4e138acfa..e519cefe6 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -28,11 +28,86 @@ namespace gtsam { +/** + * @brief Helper function to correct the [A|b] matrices in the factor components + * with the normalizer values. + * This is done by storing the normalizer value in + * the `b` vector as an additional row. + * + * @param factors DecisionTree of GaussianFactor shared pointers. + * @param varyingNormalizers Flag indicating the normalizers are different for + * each component. 
+ * @return GaussianMixtureFactor::Factors + */ +GaussianMixtureFactor::Factors correct( + const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) { + if (!varyingNormalizers) { + return factors; + } + + // First compute all the sqrt(|2 pi Sigma|) terms + auto computeNormalizers = [](const GaussianMixtureFactor::sharedFactor &gf) { + auto jf = std::dynamic_pointer_cast(gf); + // If we have, say, a Hessian factor, then no need to do anything + if (!jf) return 0.0; + + auto model = jf->get_model(); + // If there is no noise model, there is nothing to do. + if (!model) { + return 0.0; + } + // Since noise models are Gaussian, we can get the logDeterminant using the + // same trick as in GaussianConditional + double logDetR = + model->R().diagonal().unaryExpr([](double x) { return log(x); }).sum(); + double logDeterminantSigma = -2.0 * logDetR; + + size_t n = model->dim(); + constexpr double log2pi = 1.8378770664093454835606594728112; + return n * log2pi + logDeterminantSigma; + }; + + AlgebraicDecisionTree log_normalizers = + DecisionTree(factors, computeNormalizers); + + // Find the minimum value so we can "proselytize" to positive values. + // Done because we can't have sqrt of negative numbers. + double min_log_normalizer = log_normalizers.min(); + log_normalizers = log_normalizers.apply( + [&min_log_normalizer](double n) { return n - min_log_normalizer; }); + + // Finally, update the [A|b] matrices. + auto update = [&log_normalizers]( + const Assignment &assignment, + const GaussianMixtureFactor::sharedFactor &gf) { + auto jf = std::dynamic_pointer_cast(gf); + if (!jf) return gf; + // If there is no noise model, there is nothing to do. 
+ if (!jf->get_model()) return gf; + // If the log_normalizer is 0, do nothing + if (log_normalizers(assignment) == 0.0) return gf; + + GaussianFactorGraph gfg; + gfg.push_back(jf); + + Vector c(1); + c << std::sqrt(log_normalizers(assignment)); + auto constantFactor = std::make_shared(c); + + gfg.push_back(constantFactor); + return std::dynamic_pointer_cast( + std::make_shared(gfg)); + }; + return factors.apply(update); +} + /* *******************************************************************************/ GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const Factors &factors) - : Base(continuousKeys, discreteKeys), factors_(factors) {} + const Factors &factors, + bool varyingNormalizers) + : Base(continuousKeys, discreteKeys), + factors_(correct(factors, varyingNormalizers)) {} /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index b1be6ef1d..588501bbe 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -82,10 +82,13 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * their cardinalities. * @param factors The decision tree of Gaussian factors stored as the mixture * density. + * @param varyingNormalizers Flag indicating factor components have varying + * normalizer values. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const Factors &factors); + const Factors &factors, + bool varyingNormalizers = false); /** * @brief Construct a new GaussianMixtureFactor object using a vector of @@ -94,12 +97,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. 
* @param factors Vector of gaussian factor shared pointers. + * @param varyingNormalizers Flag indicating factor components have varying + * normalizer values. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const std::vector &factors) + const std::vector &factors, + bool varyingNormalizers = false) : GaussianMixtureFactor(continuousKeys, discreteKeys, - Factors(discreteKeys, factors)) {} + Factors(discreteKeys, factors), + varyingNormalizers) {} /// @} /// @name Testable diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 11b01f074..55ecce939 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -22,9 +22,13 @@ #include #include #include +#include +#include #include #include #include +#include +#include // Include for test suite #include @@ -56,7 +60,6 @@ TEST(GaussianMixtureFactor, Sum) { auto b = Matrix::Zero(2, 1); Vector2 sigmas; sigmas << 1, 2; - auto model = noiseModel::Diagonal::Sigmas(sigmas, true); auto f10 = std::make_shared(X(1), A1, X(2), A2, b); auto f11 = std::make_shared(X(1), A1, X(2), A2, b); @@ -179,7 +182,8 @@ TEST(GaussianMixtureFactor, Error) { continuousValues.insert(X(2), Vector2(1, 1)); // error should return a tree of errors, with nodes for each discrete value. 
- AlgebraicDecisionTree error_tree = mixtureFactor.errorTree(continuousValues); + AlgebraicDecisionTree error_tree = + mixtureFactor.errorTree(continuousValues); std::vector discrete_keys = {m1}; // Error values for regression test @@ -192,8 +196,163 @@ TEST(GaussianMixtureFactor, Error) { DiscreteValues discreteValues; discreteValues[m1.first] = 1; EXPECT_DOUBLES_EQUAL( - 4.0, mixtureFactor.error({continuousValues, discreteValues}), - 1e-9); + 4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9); +} + +/* ************************************************************************* */ +// Test components with differing means +TEST(GaussianMixtureFactor, DifferentMeans) { + DiscreteKey m1(M(1), 2), m2(M(2), 2); + + Values values; + double x1 = 0.0, x2 = 1.75, x3 = 2.60; + values.insert(X(1), x1); + values.insert(X(2), x2); + values.insert(X(3), x3); + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0); + + auto f0 = std::make_shared>(X(1), X(2), 0.0, model0) + ->linearize(values); + auto f1 = std::make_shared>(X(1), X(2), 2.0, model1) + ->linearize(values); + std::vector factors{f0, f1}; + + GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors, true); + HybridGaussianFactorGraph hfg; + hfg.push_back(mixtureFactor); + + f0 = std::make_shared>(X(2), X(3), 0.0, model0) + ->linearize(values); + f1 = std::make_shared>(X(2), X(3), 2.0, model1) + ->linearize(values); + std::vector factors23{f0, f1}; + hfg.push_back(GaussianMixtureFactor({X(2), X(3)}, {m2}, factors23, true)); + + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + hfg.push_back(prior); + + hfg.push_back(PriorFactor(X(2), 2.0, prior_noise).linearize(values)); + + auto bn = hfg.eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{ + {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}}, + 
DiscreteValues{{M(1), 1}, {M(2), 0}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}, {M(2), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 0}, {M(2), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}, {M(2), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}, {M(2), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + } +} + +/* ************************************************************************* */ +/** + * @brief Test components with differing covariances. 
+ * The factor graph is + * *-X1-*-X2 + * | + * M1 + */ +TEST(GaussianMixtureFactor, DifferentCovariances) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 1.0, x2 = 1.0; + values.insert(X(1), x1); + values.insert(X(2), x2); + + double between = 0.0; + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + auto f0 = + std::make_shared>(X(1), X(2), between, model0); + auto f1 = + std::make_shared>(X(1), X(2), between, model1); + std::vector factors{f0, f1}; + + // Create via toFactorGraph + using symbol_shorthand::Z; + Matrix H0_1, H0_2, H1_1, H1_2; + Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); + std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H0_1 /*Sp1*/}, + {X(2), H0_2 /*Tp2*/}}; + + Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); + std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H1_1 /*Sp1*/}, + {X(2), H1_2 /*Tp2*/}}; + gtsam::GaussianMixtureFactor gmf( + {X(1), X(2)}, {m1}, + {std::make_shared(X(1), H0_1, X(2), H0_2, -d0, model0), + std::make_shared(X(1), H1_1, X(2), H1_2, -d1, model1)}, + true); + + // Create FG with single GaussianMixtureFactor + HybridGaussianFactorGraph mixture_fg; + mixture_fg.add(gmf); + + // Linearized prior factor on X1 + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + mixture_fg.push_back(prior); + + auto hbn = mixture_fg.eliminateSequential(); + // hbn->print(); + + VectorValues cv; + cv.insert(X(1), Vector1(0.0)); + cv.insert(X(2), Vector1(0.0)); + + // Check that the error values at the MLE point μ. 
+ AlgebraicDecisionTree errorTree = hbn->errorTree(cv); + + DiscreteValues dv0{{M(1), 0}}; + DiscreteValues dv1{{M(1), 1}}; + + // regression + EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv0), 1e-9); + EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); + + DiscreteConditional expected_m1(m1, "0.5/0.5"); + DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); + + EXPECT(assert_equal(expected_m1, actual_m1)); } /* ************************************************************************* */ From d4e5a9be5d3b3fe6ec31e5238a8879d6ca77ccb7 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 07:57:15 -0400 Subject: [PATCH 067/398] different means test both via direct factor definition and toFactorGraph --- .../tests/testGaussianMixtureFactor.cpp | 79 ++++++++++++++++++- 1 file changed, 78 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 55ecce939..47b9ddc99 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -346,7 +346,84 @@ TEST(GaussianMixtureFactor, DifferentCovariances) { DiscreteValues dv1{{M(1), 1}}; // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv0), 1e-9); + EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); + EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); + + DiscreteConditional expected_m1(m1, "0.5/0.5"); + DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); + + EXPECT(assert_equal(expected_m1, actual_m1)); +} + +/* ************************************************************************* */ +/** + * @brief Test components with differing covariances + * but with a Bayes net P(Z|X, M) converted to a FG. 
+ */ +TEST(GaussianMixtureFactor, DifferentCovariances2) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 1.0, x2 = 1.0; + values.insert(X(1), x1); + values.insert(X(2), x2); + + double between = 0.0; + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + auto f0 = + std::make_shared>(X(1), X(2), between, model0); + auto f1 = + std::make_shared>(X(1), X(2), between, model1); + std::vector factors{f0, f1}; + + // Create via toFactorGraph + using symbol_shorthand::Z; + Matrix H0_1, H0_2, H1_1, H1_2; + Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); + std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H0_1 /*Sp1*/}, + {X(2), H0_2 /*Tp2*/}}; + + Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); + std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H1_1 /*Sp1*/}, + {X(2), H1_2 /*Tp2*/}}; + auto gm = new gtsam::GaussianMixture( + {Z(1)}, {X(1), X(2)}, {m1}, + {std::make_shared(terms0, 1, -d0, model0), + std::make_shared(terms1, 1, -d1, model1)}); + gtsam::HybridBayesNet bn; + bn.emplace_back(gm); + + gtsam::VectorValues measurements; + measurements.insert(Z(1), gtsam::Z_1x1); + // Create FG with single GaussianMixtureFactor + HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); + + // Linearized prior factor on X1 + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + mixture_fg.push_back(prior); + + auto hbn = mixture_fg.eliminateSequential(); + + VectorValues cv; + cv.insert(X(1), Vector1(0.0)); + cv.insert(X(2), Vector1(0.0)); + + // Check that the error values at the MLE point μ. 
+ AlgebraicDecisionTree errorTree = hbn->errorTree(cv); + + DiscreteValues dv0{{M(1), 0}}; + DiscreteValues dv1{{M(1), 1}}; + + // regression + EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); DiscreteConditional expected_m1(m1, "0.5/0.5"); From fef929f2664a1911226c17f47be47e456bad73af Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 12:53:53 -0400 Subject: [PATCH 068/398] clean up model selection --- gtsam/hybrid/HybridBayesNet.cpp | 25 ++++--------------------- 1 file changed, 4 insertions(+), 21 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 23ee72215..29a850da8 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -284,15 +284,10 @@ GaussianBayesNetValTree HybridBayesNet::assembleTree() const { AlgebraicDecisionTree HybridBayesNet::modelSelection() const { /* To perform model selection, we need: - q(mu; M, Z) * sqrt((2*pi)^n*det(Sigma)) - - If q(mu; M, Z) = exp(-error) & k = 1.0 / sqrt((2*pi)^n*det(Sigma)) - thus, q * sqrt((2*pi)^n*det(Sigma)) = q/k = exp(log(q/k)) - = exp(log(q) - log(k)) = exp(-error - log(k)) - = exp(-(error + log(k))), + q(mu; M, Z) = exp(-error) where error is computed at the corresponding MAP point, gbn.error(mu). - So we compute (error + log(k)) and exponentiate later + So we compute `error` and exponentiate after. 
*/ GaussianBayesNetValTree bnTree = assembleTree(); @@ -316,22 +311,10 @@ AlgebraicDecisionTree HybridBayesNet::modelSelection() const { return std::make_pair(gbnAndValue.first, error); }); + // Compute model selection term (with help from ADT methods) auto trees = unzip(bn_error); AlgebraicDecisionTree errorTree = trees.second; - - // Only compute logNormalizationConstant - AlgebraicDecisionTree log_norm_constants = DecisionTree( - bnTree, [](const std::pair &gbnAndValue) { - GaussianBayesNet gbn = gbnAndValue.first; - if (gbn.size() == 0) { - return 0.0; - } - return gbn.logNormalizationConstant(); - }); - - // Compute model selection term (with help from ADT methods) - AlgebraicDecisionTree modelSelectionTerm = - (errorTree + log_norm_constants) * -1; + AlgebraicDecisionTree modelSelectionTerm = errorTree * -1; double max_log = modelSelectionTerm.max(); modelSelectionTerm = DecisionTree( From 654bad738170185911a53ef551a64041a5975bf2 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 14:04:37 -0400 Subject: [PATCH 069/398] remove model selection code --- gtsam/hybrid/GaussianMixture.cpp | 51 ++++++------ gtsam/hybrid/HybridBayesNet.cpp | 134 ------------------------------- gtsam/hybrid/HybridBayesNet.h | 28 ------- gtsam/hybrid/HybridFactor.h | 7 +- 4 files changed, 27 insertions(+), 193 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 7f42e6986..8278549d0 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -71,29 +71,27 @@ GaussianMixture::GaussianMixture( Conditionals(discreteParents, conditionals)) {} /* *******************************************************************************/ -// TODO(dellaert): This is copy/paste: GaussianMixture should be derived from -// GaussianMixtureFactor, no? 
-GaussianFactorGraphTree GaussianMixture::add( - const GaussianFactorGraphTree &sum) const { - using Y = GaussianFactorGraph; - auto add = [](const Y &graph1, const Y &graph2) { - auto result = graph1; - result.push_back(graph2); - return result; - }; - const auto tree = asGaussianFactorGraphTree(); - return sum.empty() ? tree : sum.apply(tree, add); -} - -/* *******************************************************************************/ -GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { +GaussianBayesNetTree GaussianMixture::asGaussianBayesNetTree() const { auto wrap = [](const GaussianConditional::shared_ptr &gc) { - return GaussianFactorGraph{gc}; + if (gc) { + return GaussianBayesNet{gc}; + } else { + return GaussianBayesNet(); + } }; return {conditionals_, wrap}; } /* *******************************************************************************/ +GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { + auto wrap = [](const GaussianBayesNet &gbn) { + return GaussianFactorGraph(gbn); + }; + return {this->asGaussianBayesNetTree(), wrap}; +} + +/* +*******************************************************************************/ GaussianBayesNetTree GaussianMixture::add( const GaussianBayesNetTree &sum) const { using Y = GaussianBayesNet; @@ -110,15 +108,18 @@ GaussianBayesNetTree GaussianMixture::add( } /* *******************************************************************************/ -GaussianBayesNetTree GaussianMixture::asGaussianBayesNetTree() const { - auto wrap = [](const GaussianConditional::shared_ptr &gc) { - if (gc) { - return GaussianBayesNet{gc}; - } else { - return GaussianBayesNet(); - } +// TODO(dellaert): This is copy/paste: GaussianMixture should be derived from +// GaussianMixtureFactor, no? 
+GaussianFactorGraphTree GaussianMixture::add( + const GaussianFactorGraphTree &sum) const { + using Y = GaussianFactorGraph; + auto add = [](const Y &graph1, const Y &graph2) { + auto result = graph1; + result.push_back(graph2); + return result; }; - return {conditionals_, wrap}; + const auto tree = asGaussianFactorGraphTree(); + return sum.empty() ? tree : sum.apply(tree, add); } /* *******************************************************************************/ diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 29a850da8..1d01baed2 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -26,16 +26,6 @@ static std::mt19937_64 kRandomNumberGenerator(42); namespace gtsam { -/* ************************************************************************ */ -// Throw a runtime exception for method specified in string s, -// and conditional f: -static void throwRuntimeError(const std::string &s, - const std::shared_ptr &f) { - auto &fr = *f; - throw std::runtime_error(s + " not implemented for conditional type " + - demangle(typeid(fr).name()) + "."); -} - /* ************************************************************************* */ void HybridBayesNet::print(const std::string &s, const KeyFormatter &formatter) const { @@ -227,141 +217,17 @@ GaussianBayesNet HybridBayesNet::choose( return gbn; } -/* ************************************************************************ */ -static GaussianBayesNetTree addGaussian( - const GaussianBayesNetTree &gfgTree, - const GaussianConditional::shared_ptr &factor) { - // If the decision tree is not initialized, then initialize it. 
- if (gfgTree.empty()) { - GaussianBayesNet result{factor}; - return GaussianBayesNetTree(result); - } else { - auto add = [&factor](const GaussianBayesNet &graph) { - auto result = graph; - result.push_back(factor); - return result; - }; - return gfgTree.apply(add); - } -} - -/* ************************************************************************ */ -GaussianBayesNetValTree HybridBayesNet::assembleTree() const { - GaussianBayesNetTree result; - - for (auto &f : factors_) { - // TODO(dellaert): just use a virtual method defined in HybridFactor. - if (auto gm = std::dynamic_pointer_cast(f)) { - result = gm->add(result); - } else if (auto hc = std::dynamic_pointer_cast(f)) { - if (auto gm = hc->asMixture()) { - result = gm->add(result); - } else if (auto g = hc->asGaussian()) { - result = addGaussian(result, g); - } else { - // Has to be discrete. - // TODO(dellaert): in C++20, we can use std::visit. - continue; - } - } else if (std::dynamic_pointer_cast(f)) { - // Don't do anything for discrete-only factors - // since we want to evaluate continuous values only. - continue; - } else { - // We need to handle the case where the object is actually an - // BayesTreeOrphanWrapper! - throwRuntimeError("HybridBayesNet::assembleTree", f); - } - } - - GaussianBayesNetValTree resultTree(result, [](const GaussianBayesNet &gbn) { - return std::make_pair(gbn, 0.0); - }); - return resultTree; -} - -/* ************************************************************************* */ -AlgebraicDecisionTree HybridBayesNet::modelSelection() const { - /* - To perform model selection, we need: - q(mu; M, Z) = exp(-error) - where error is computed at the corresponding MAP point, gbn.error(mu). - - So we compute `error` and exponentiate after. 
- */ - - GaussianBayesNetValTree bnTree = assembleTree(); - - GaussianBayesNetValTree bn_error = bnTree.apply( - [this](const Assignment &assignment, - const std::pair &gbnAndValue) { - // Compute the X* of each assignment - VectorValues mu = gbnAndValue.first.optimize(); - - // mu is empty if gbn had nullptrs - if (mu.size() == 0) { - return std::make_pair(gbnAndValue.first, - std::numeric_limits::max()); - } - - // Compute the error for X* and the assignment - double error = - this->error(HybridValues(mu, DiscreteValues(assignment))); - - return std::make_pair(gbnAndValue.first, error); - }); - - // Compute model selection term (with help from ADT methods) - auto trees = unzip(bn_error); - AlgebraicDecisionTree errorTree = trees.second; - AlgebraicDecisionTree modelSelectionTerm = errorTree * -1; - - double max_log = modelSelectionTerm.max(); - modelSelectionTerm = DecisionTree( - modelSelectionTerm, - [&max_log](const double &x) { return std::exp(x - max_log); }); - modelSelectionTerm = modelSelectionTerm.normalize(modelSelectionTerm.sum()); - - return modelSelectionTerm; -} - /* ************************************************************************* */ HybridValues HybridBayesNet::optimize() const { // Collect all the discrete factors to compute MPE DiscreteFactorGraph discrete_fg; - // Compute model selection term - AlgebraicDecisionTree modelSelectionTerm = modelSelection(); - - // Get the set of all discrete keys involved in model selection - std::set discreteKeySet; for (auto &&conditional : *this) { if (conditional->isDiscrete()) { discrete_fg.push_back(conditional->asDiscrete()); - } else { - if (conditional->isContinuous()) { - /* - If we are here, it means there are no discrete variables in - the Bayes net (due to strong elimination ordering). - This is a continuous-only problem hence model selection doesn't matter. 
- */ - - } else if (conditional->isHybrid()) { - auto gm = conditional->asMixture(); - // Include the discrete keys - std::copy(gm->discreteKeys().begin(), gm->discreteKeys().end(), - std::inserter(discreteKeySet, discreteKeySet.end())); - } } } - // Only add model_selection if we have discrete keys - if (discreteKeySet.size() > 0) { - discrete_fg.push_back(DecisionTreeFactor( - DiscreteKeys(discreteKeySet.begin(), discreteKeySet.end()), - modelSelectionTerm)); - } - // Solve for the MPE DiscreteValues mpe = discrete_fg.optimize(); diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index fe9010b1f..032cd55b9 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -118,34 +118,6 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { return evaluate(values); } - /** - * @brief Assemble a DecisionTree of (GaussianBayesNet, double) leaves for - * each discrete assignment. - * The included double value is used to make - * constructing the model selection term cleaner and more efficient. - * - * @return GaussianBayesNetValTree - */ - GaussianBayesNetValTree assembleTree() const; - - /* - Compute L(M;Z), the likelihood of the discrete model M - given the measurements Z. - This is called the model selection term. - - To do so, we perform the integration of L(M;Z) ∝ L(X;M,Z)P(X|M). - - By Bayes' rule, P(X|M,Z) ∝ L(X;M,Z)P(X|M), - hence L(X;M,Z)P(X|M) is the unnormalized probabilty of - the joint Gaussian distribution. - - This can be computed by multiplying all the exponentiated errors - of each of the conditionals. - - Return a tree where each leaf value is L(M_i;Z). 
- */ - AlgebraicDecisionTree modelSelection() const; - /** * @brief Solve the HybridBayesNet by first computing the MPE of all the * discrete variables and then optimizing the continuous variables based on diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index 8828a9172..418489d66 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -13,6 +13,7 @@ * @file HybridFactor.h * @date Mar 11, 2022 * @author Fan Jiang + * @author Varun Agrawal */ #pragma once @@ -35,12 +36,6 @@ class HybridValues; using GaussianFactorGraphTree = DecisionTree; /// Alias for DecisionTree of GaussianBayesNets using GaussianBayesNetTree = DecisionTree; -/** - * Alias for DecisionTree of (GaussianBayesNet, double) pairs. - * Used for model selection in BayesNet::optimize - */ -using GaussianBayesNetValTree = - DecisionTree>; KeyVector CollectKeys(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys); From 6b1d89d0a7ebb84db3c35f578820d1d3ae063838 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 14:19:35 -0400 Subject: [PATCH 070/398] fix testMixtureFactor test --- gtsam/hybrid/tests/testMixtureFactor.cpp | 26 ++++++------------------ 1 file changed, 6 insertions(+), 20 deletions(-) diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index c03df4d61..58a12b3d8 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -252,30 +252,16 @@ TEST(MixtureFactor, DifferentCovariances) { // Check that we get different error values at the MLE point μ. 
AlgebraicDecisionTree errorTree = hbn->errorTree(cv); - auto cond0 = hbn->at(0)->asMixture(); - auto cond1 = hbn->at(1)->asMixture(); - auto discrete_cond = hbn->at(2)->asDiscrete(); HybridValues hv0(cv, DiscreteValues{{M(1), 0}}); HybridValues hv1(cv, DiscreteValues{{M(1), 1}}); - AlgebraicDecisionTree expectedErrorTree( - m1, - cond0->error(hv0) // cond0(0)->logNormalizationConstant() - // - cond0(1)->logNormalizationConstant - + cond1->error(hv0) + discrete_cond->error(DiscreteValues{{M(1), 0}}), - cond0->error(hv1) // cond1(0)->logNormalizationConstant() - // - cond1(1)->logNormalizationConstant - + cond1->error(hv1) + - discrete_cond->error(DiscreteValues{{M(1), 0}})); + + auto cond0 = hbn->at(0)->asMixture(); + auto cond1 = hbn->at(1)->asMixture(); + auto discrete_cond = hbn->at(2)->asDiscrete(); + AlgebraicDecisionTree expectedErrorTree(m1, 9.90348755254, + 0.69314718056); EXPECT(assert_equal(expectedErrorTree, errorTree)); - - DiscreteValues dv; - dv.insert({M(1), 1}); - HybridValues expected_values(cv, dv); - - HybridValues actual_values = hbn->optimize(); - - EXPECT(assert_equal(expected_values, actual_values)); } /* ************************************************************************* */ From 0b1c3688c43f45808ee20e0e02aabaf9673cfc20 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 14:50:54 -0400 Subject: [PATCH 071/398] remove model selection from hybrid bayes tree --- gtsam/hybrid/HybridBayesNet.cpp | 33 -------- gtsam/hybrid/HybridBayesNet.h | 11 --- gtsam/hybrid/HybridBayesTree.cpp | 99 ------------------------ gtsam/hybrid/HybridBayesTree.h | 40 ---------- gtsam/hybrid/tests/testMixtureFactor.cpp | 39 +++------- 5 files changed, 9 insertions(+), 213 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 2bced6c0d..33c7c91da 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -378,37 +378,4 @@ HybridGaussianFactorGraph HybridBayesNet::toFactorGraph( } 
return fg; } - -/* ************************************************************************ */ -GaussianBayesNetTree addGaussian( - const GaussianBayesNetTree &gbnTree, - const GaussianConditional::shared_ptr &factor) { - // If the decision tree is not initialized, then initialize it. - if (gbnTree.empty()) { - GaussianBayesNet result{factor}; - return GaussianBayesNetTree(result); - } else { - auto add = [&factor](const GaussianBayesNet &graph) { - auto result = graph; - result.push_back(factor); - return result; - }; - return gbnTree.apply(add); - } -} - -/* ************************************************************************* */ -AlgebraicDecisionTree computeLogNormConstants( - const GaussianBayesNetValTree &bnTree) { - AlgebraicDecisionTree log_norm_constants = DecisionTree( - bnTree, [](const std::pair &gbnAndValue) { - GaussianBayesNet gbn = gbnAndValue.first; - if (gbn.size() == 0) { - return 0.0; - } - return gbn.logNormalizationConstant(); - }); - return log_norm_constants; -} - } // namespace gtsam diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 6f7b9aae7..55eaf6b5e 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -262,15 +262,4 @@ struct traits : public Testable {}; GaussianBayesNetTree addGaussian(const GaussianBayesNetTree &gbnTree, const GaussianConditional::shared_ptr &factor); -/** - * @brief Compute the (logarithmic) normalization constant for each Bayes - * network in the tree. - * - * @param bnTree A tree of Bayes networks in each leaf. The tree encodes a - * discrete assignment yielding the Bayes net. 
- * @return AlgebraicDecisionTree - */ -AlgebraicDecisionTree computeLogNormConstants( - const GaussianBayesNetValTree &bnTree); - } // namespace gtsam diff --git a/gtsam/hybrid/HybridBayesTree.cpp b/gtsam/hybrid/HybridBayesTree.cpp index 394c88928..f08eff01b 100644 --- a/gtsam/hybrid/HybridBayesTree.cpp +++ b/gtsam/hybrid/HybridBayesTree.cpp @@ -38,117 +38,18 @@ bool HybridBayesTree::equals(const This& other, double tol) const { return Base::equals(other, tol); } -GaussianBayesNetTree& HybridBayesTree::addCliqueToTree( - const sharedClique& clique, GaussianBayesNetTree& result) const { - // Perform bottom-up inclusion - for (sharedClique child : clique->children) { - result = addCliqueToTree(child, result); - } - - auto f = clique->conditional(); - - if (auto hc = std::dynamic_pointer_cast(f)) { - if (auto gm = hc->asMixture()) { - result = gm->add(result); - } else if (auto g = hc->asGaussian()) { - result = addGaussian(result, g); - } else { - // Has to be discrete, which we don't add. - } - } - return result; -} - -/* ************************************************************************ */ -GaussianBayesNetValTree HybridBayesTree::assembleTree() const { - GaussianBayesNetTree result; - for (auto&& root : roots_) { - result = addCliqueToTree(root, result); - } - - GaussianBayesNetValTree resultTree(result, [](const GaussianBayesNet& gbn) { - return std::make_pair(gbn, 0.0); - }); - return resultTree; -} - -/* ************************************************************************* */ -AlgebraicDecisionTree HybridBayesTree::modelSelection() const { - /* - To perform model selection, we need: - q(mu; M, Z) * sqrt((2*pi)^n*det(Sigma)) - - If q(mu; M, Z) = exp(-error) & k = 1.0 / sqrt((2*pi)^n*det(Sigma)) - thus, q * sqrt((2*pi)^n*det(Sigma)) = q/k = exp(log(q/k)) - = exp(log(q) - log(k)) = exp(-error - log(k)) - = exp(-(error + log(k))), - where error is computed at the corresponding MAP point, gbt.error(mu). 
- - So we compute (error + log(k)) and exponentiate later - */ - - GaussianBayesNetValTree bnTree = assembleTree(); - - GaussianBayesNetValTree bn_error = bnTree.apply( - [this](const Assignment& assignment, - const std::pair& gbnAndValue) { - // Compute the X* of each assignment - VectorValues mu = gbnAndValue.first.optimize(); - - // mu is empty if gbn had nullptrs - if (mu.size() == 0) { - return std::make_pair(gbnAndValue.first, - std::numeric_limits::max()); - } - - // Compute the error for X* and the assignment - double error = - this->error(HybridValues(mu, DiscreteValues(assignment))); - - return std::make_pair(gbnAndValue.first, error); - }); - - auto trees = unzip(bn_error); - AlgebraicDecisionTree errorTree = trees.second; - - // Compute model selection term (with help from ADT methods) - AlgebraicDecisionTree modelSelectionTerm = errorTree * -1; - - // Exponentiate using our scheme - double max_log = modelSelectionTerm.max(); - modelSelectionTerm = DecisionTree( - modelSelectionTerm, - [&max_log](const double& x) { return std::exp(x - max_log); }); - modelSelectionTerm = modelSelectionTerm.normalize(modelSelectionTerm.sum()); - - return modelSelectionTerm; -} - /* ************************************************************************* */ HybridValues HybridBayesTree::optimize() const { DiscreteFactorGraph discrete_fg; DiscreteValues mpe; - // Compute model selection term - AlgebraicDecisionTree modelSelectionTerm = modelSelection(); - auto root = roots_.at(0); // Access the clique and get the underlying hybrid conditional HybridConditional::shared_ptr root_conditional = root->conditional(); - // Get the set of all discrete keys involved in model selection - std::set discreteKeySet; - // The root should be discrete only, we compute the MPE if (root_conditional->isDiscrete()) { discrete_fg.push_back(root_conditional->asDiscrete()); - - // Only add model_selection if we have discrete keys - if (discreteKeySet.size() > 0) { - 
discrete_fg.push_back(DecisionTreeFactor( - DiscreteKeys(discreteKeySet.begin(), discreteKeySet.end()), - modelSelectionTerm)); - } mpe = discrete_fg.optimize(); } else { throw std::runtime_error( diff --git a/gtsam/hybrid/HybridBayesTree.h b/gtsam/hybrid/HybridBayesTree.h index 8327b7f31..af8eb3228 100644 --- a/gtsam/hybrid/HybridBayesTree.h +++ b/gtsam/hybrid/HybridBayesTree.h @@ -89,46 +89,6 @@ class GTSAM_EXPORT HybridBayesTree : public BayesTree { return HybridGaussianFactorGraph(*this).error(values); } - /** - * @brief Helper function to add a clique of hybrid conditionals to the passed - * in GaussianBayesNetTree. Operates recursively on the clique in a bottom-up - * fashion, adding the children first. - * - * @param clique The - * @param result - * @return GaussianBayesNetTree& - */ - GaussianBayesNetTree& addCliqueToTree(const sharedClique& clique, - GaussianBayesNetTree& result) const; - - /** - * @brief Assemble a DecisionTree of (GaussianBayesTree, double) leaves for - * each discrete assignment. - * The included double value is used to make - * constructing the model selection term cleaner and more efficient. - * - * @return GaussianBayesNetValTree - */ - GaussianBayesNetValTree assembleTree() const; - - /* - Compute L(M;Z), the likelihood of the discrete model M - given the measurements Z. - This is called the model selection term. - - To do so, we perform the integration of L(M;Z) ∝ L(X;M,Z)P(X|M). - - By Bayes' rule, P(X|M,Z) ∝ L(X;M,Z)P(X|M), - hence L(X;M,Z)P(X|M) is the unnormalized probabilty of - the joint Gaussian distribution. - - This can be computed by multiplying all the exponentiated errors - of each of the conditionals. - - Return a tree where each leaf value is L(M_i;Z). 
- */ - AlgebraicDecisionTree modelSelection() const; - /** * @brief Optimize the hybrid Bayes tree by computing the MPE for the current * set of discrete variables and using it to compute the best continuous diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index 9de277214..1bd4f5b88 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -252,30 +252,16 @@ TEST(MixtureFactor, DifferentCovariances) { // Check that we get different error values at the MLE point μ. AlgebraicDecisionTree errorTree = hbn->errorTree(cv); - auto cond0 = hbn->at(0)->asMixture(); - auto cond1 = hbn->at(1)->asMixture(); - auto discrete_cond = hbn->at(2)->asDiscrete(); HybridValues hv0(cv, DiscreteValues{{M(1), 0}}); HybridValues hv1(cv, DiscreteValues{{M(1), 1}}); - AlgebraicDecisionTree expectedErrorTree( - m1, - cond0->error(hv0) // cond0(0)->logNormalizationConstant() - // - cond0(1)->logNormalizationConstant - + cond1->error(hv0) + discrete_cond->error(DiscreteValues{{M(1), 0}}), - cond0->error(hv1) // cond1(0)->logNormalizationConstant() - // - cond1(1)->logNormalizationConstant - + cond1->error(hv1) + - discrete_cond->error(DiscreteValues{{M(1), 0}})); + + auto cond0 = hbn->at(0)->asMixture(); + auto cond1 = hbn->at(1)->asMixture(); + auto discrete_cond = hbn->at(2)->asDiscrete(); + AlgebraicDecisionTree expectedErrorTree(m1, 9.90348755254, + 0.69314718056); EXPECT(assert_equal(expectedErrorTree, errorTree)); - - DiscreteValues dv; - dv.insert({M(1), 1}); - HybridValues expected_values(cv, dv); - - HybridValues actual_values = hbn->optimize(); - - EXPECT(assert_equal(expected_values, actual_values)); } /* ************************************************************************* */ @@ -329,16 +315,7 @@ TEST(MixtureFactor, DifferentMeansAndCovariances) { auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); mixture_fg.push_back(prior); - // bn.print("BayesNet:"); - // 
mixture_fg.print("\n\n"); - VectorValues vv{{X(1), x1 * I_1x1}, {X(2), x2 * I_1x1}}; - // std::cout << "FG error for m1=0: " - // << mixture_fg.error(HybridValues(vv, DiscreteValues{{m1.first, 0}})) - // << std::endl; - // std::cout << "FG error for m1=1: " - // << mixture_fg.error(HybridValues(vv, DiscreteValues{{m1.first, 1}})) - // << std::endl; auto hbn = mixture_fg.eliminateSequential(); @@ -347,8 +324,10 @@ TEST(MixtureFactor, DifferentMeansAndCovariances) { VectorValues cv; cv.insert(X(1), Vector1(0.0)); cv.insert(X(2), Vector1(-7.0)); + + // The first value is chosen as the tiebreaker DiscreteValues dv; - dv.insert({M(1), 1}); + dv.insert({M(1), 0}); HybridValues expected_values(cv, dv); EXPECT(assert_equal(expected_values, actual_values)); From 6d9fc8e5f21b020e5dc8f250912d973d243ee665 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 15:26:04 -0400 Subject: [PATCH 072/398] undo change in GaussianMixture --- gtsam/hybrid/GaussianMixture.cpp | 44 ++++++++++++++++---------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 8278549d0..36a34226b 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -71,23 +71,26 @@ GaussianMixture::GaussianMixture( Conditionals(discreteParents, conditionals)) {} /* *******************************************************************************/ -GaussianBayesNetTree GaussianMixture::asGaussianBayesNetTree() const { - auto wrap = [](const GaussianConditional::shared_ptr &gc) { - if (gc) { - return GaussianBayesNet{gc}; - } else { - return GaussianBayesNet(); - } +// TODO(dellaert): This is copy/paste: GaussianMixture should be derived from +// GaussianMixtureFactor, no? 
+GaussianFactorGraphTree GaussianMixture::add( + const GaussianFactorGraphTree &sum) const { + using Y = GaussianFactorGraph; + auto add = [](const Y &graph1, const Y &graph2) { + auto result = graph1; + result.push_back(graph2); + return result; }; - return {conditionals_, wrap}; + const auto tree = asGaussianFactorGraphTree(); + return sum.empty() ? tree : sum.apply(tree, add); } /* *******************************************************************************/ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { - auto wrap = [](const GaussianBayesNet &gbn) { - return GaussianFactorGraph(gbn); + auto wrap = [](const GaussianConditional::shared_ptr &gc) { + return GaussianFactorGraph{gc}; }; - return {this->asGaussianBayesNetTree(), wrap}; + return {conditionals_, wrap}; } /* @@ -108,18 +111,15 @@ GaussianBayesNetTree GaussianMixture::add( } /* *******************************************************************************/ -// TODO(dellaert): This is copy/paste: GaussianMixture should be derived from -// GaussianMixtureFactor, no? -GaussianFactorGraphTree GaussianMixture::add( - const GaussianFactorGraphTree &sum) const { - using Y = GaussianFactorGraph; - auto add = [](const Y &graph1, const Y &graph2) { - auto result = graph1; - result.push_back(graph2); - return result; +GaussianBayesNetTree GaussianMixture::asGaussianBayesNetTree() const { + auto wrap = [](const GaussianConditional::shared_ptr &gc) { + if (gc) { + return GaussianBayesNet{gc}; + } else { + return GaussianBayesNet(); + } }; - const auto tree = asGaussianFactorGraphTree(); - return sum.empty() ? 
tree : sum.apply(tree, add); + return {conditionals_, wrap}; } /* *******************************************************************************/ From fd2062b2079a319a5a70c058aacc0adb3f252065 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 15:48:07 -0400 Subject: [PATCH 073/398] remove changes so we can break up PR into smaller ones --- gtsam/hybrid/GaussianMixture.cpp | 67 +---- gtsam/hybrid/GaussianMixture.h | 18 +- gtsam/hybrid/GaussianMixtureFactor.cpp | 83 +----- gtsam/hybrid/GaussianMixtureFactor.h | 18 +- gtsam/hybrid/HybridBayesNet.cpp | 6 +- gtsam/hybrid/HybridFactor.h | 3 - gtsam/hybrid/HybridGaussianFactorGraph.cpp | 40 +-- .../tests/testGaussianMixtureFactor.cpp | 247 +----------------- .../tests/testHybridNonlinearFactorGraph.cpp | 15 -- gtsam/hybrid/tests/testMixtureFactor.cpp | 149 ----------- gtsam/linear/GaussianBayesNet.cpp | 20 -- gtsam/linear/GaussianBayesNet.h | 14 - gtsam/linear/GaussianConditional.cpp | 10 +- gtsam/linear/VectorValues.h | 5 - gtsam/linear/linear.i | 5 - .../linear/tests/testGaussianConditional.cpp | 3 - 16 files changed, 37 insertions(+), 666 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 36a34226b..531a9f2e5 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -24,7 +24,6 @@ #include #include #include -#include #include namespace gtsam { @@ -93,35 +92,6 @@ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { return {conditionals_, wrap}; } -/* -*******************************************************************************/ -GaussianBayesNetTree GaussianMixture::add( - const GaussianBayesNetTree &sum) const { - using Y = GaussianBayesNet; - auto add = [](const Y &graph1, const Y &graph2) { - auto result = graph1; - if (graph2.size() == 0) { - return GaussianBayesNet(); - } - result.push_back(graph2); - return result; - }; - const auto tree = asGaussianBayesNetTree(); - return sum.empty() ? 
tree : sum.apply(tree, add); -} - -/* *******************************************************************************/ -GaussianBayesNetTree GaussianMixture::asGaussianBayesNetTree() const { - auto wrap = [](const GaussianConditional::shared_ptr &gc) { - if (gc) { - return GaussianBayesNet{gc}; - } else { - return GaussianBayesNet(); - } - }; - return {conditionals_, wrap}; -} - /* *******************************************************************************/ size_t GaussianMixture::nrComponents() const { size_t total = 0; @@ -348,15 +318,8 @@ AlgebraicDecisionTree GaussianMixture::logProbability( AlgebraicDecisionTree GaussianMixture::errorTree( const VectorValues &continuousValues) const { auto errorFunc = [&](const GaussianConditional::shared_ptr &conditional) { - // Check if valid pointer - if (conditional) { - return conditional->error(continuousValues) + // - logConstant_ - conditional->logNormalizationConstant(); - } else { - // If not valid, pointer, it means this conditional was pruned, - // so we return maximum error. - return std::numeric_limits::max(); - } + return conditional->error(continuousValues) + // + logConstant_ - conditional->logNormalizationConstant(); }; DecisionTree error_tree(conditionals_, errorFunc); return error_tree; @@ -364,32 +327,10 @@ AlgebraicDecisionTree GaussianMixture::errorTree( /* *******************************************************************************/ double GaussianMixture::error(const HybridValues &values) const { - // Check if discrete keys in discrete assignment are - // present in the GaussianMixture - KeyVector dKeys = this->discreteKeys_.indices(); - bool valid_assignment = false; - for (auto &&kv : values.discrete()) { - if (std::find(dKeys.begin(), dKeys.end(), kv.first) != dKeys.end()) { - valid_assignment = true; - break; - } - } - - // The discrete assignment is not valid so we return 0.0 erorr. 
- if (!valid_assignment) { - return 0.0; - } - // Directly index to get the conditional, no need to build the whole tree. auto conditional = conditionals_(values.discrete()); - if (conditional) { - return conditional->error(values.continuous()) + // - logConstant_ - conditional->logNormalizationConstant(); - } else { - // If not valid, pointer, it means this conditional was pruned, - // so we return maximum error. - return std::numeric_limits::max(); - } + return conditional->error(values.continuous()) + // + logConstant_ - conditional->logNormalizationConstant(); } /* *******************************************************************************/ diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index cefc14241..c1ef504f8 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -72,12 +72,6 @@ class GTSAM_EXPORT GaussianMixture */ GaussianFactorGraphTree asGaussianFactorGraphTree() const; - /** - * @brief Convert a DecisionTree of conditionals into - * a DecisionTree of Gaussian Bayes nets. - */ - GaussianBayesNetTree asGaussianBayesNetTree() const; - /** * @brief Helper function to get the pruner functor. * @@ -221,7 +215,8 @@ class GTSAM_EXPORT GaussianMixture * @return AlgebraicDecisionTree A decision tree on the discrete keys * only, with the leaf values as the error for each assignment. */ - AlgebraicDecisionTree errorTree(const VectorValues &continuousValues) const; + AlgebraicDecisionTree errorTree( + const VectorValues &continuousValues) const; /** * @brief Compute the logProbability of this Gaussian Mixture. @@ -255,15 +250,6 @@ class GTSAM_EXPORT GaussianMixture * @return GaussianFactorGraphTree */ GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const; - - /** - * @brief Merge the Gaussian Bayes Nets in `this` and `sum` while - * maintaining the decision tree structure. 
- * - * @param sum Decision Tree of Gaussian Bayes Nets - * @return GaussianBayesNetTree - */ - GaussianBayesNetTree add(const GaussianBayesNetTree &sum) const; /// @} private: diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index e519cefe6..a3db16d04 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -28,86 +28,11 @@ namespace gtsam { -/** - * @brief Helper function to correct the [A|b] matrices in the factor components - * with the normalizer values. - * This is done by storing the normalizer value in - * the `b` vector as an additional row. - * - * @param factors DecisionTree of GaussianFactor shared pointers. - * @param varyingNormalizers Flag indicating the normalizers are different for - * each component. - * @return GaussianMixtureFactor::Factors - */ -GaussianMixtureFactor::Factors correct( - const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) { - if (!varyingNormalizers) { - return factors; - } - - // First compute all the sqrt(|2 pi Sigma|) terms - auto computeNormalizers = [](const GaussianMixtureFactor::sharedFactor &gf) { - auto jf = std::dynamic_pointer_cast(gf); - // If we have, say, a Hessian factor, then no need to do anything - if (!jf) return 0.0; - - auto model = jf->get_model(); - // If there is no noise model, there is nothing to do. - if (!model) { - return 0.0; - } - // Since noise models are Gaussian, we can get the logDeterminant using the - // same trick as in GaussianConditional - double logDetR = - model->R().diagonal().unaryExpr([](double x) { return log(x); }).sum(); - double logDeterminantSigma = -2.0 * logDetR; - - size_t n = model->dim(); - constexpr double log2pi = 1.8378770664093454835606594728112; - return n * log2pi + logDeterminantSigma; - }; - - AlgebraicDecisionTree log_normalizers = - DecisionTree(factors, computeNormalizers); - - // Find the minimum value so we can "proselytize" to positive values. 
- // Done because we can't have sqrt of negative numbers. - double min_log_normalizer = log_normalizers.min(); - log_normalizers = log_normalizers.apply( - [&min_log_normalizer](double n) { return n - min_log_normalizer; }); - - // Finally, update the [A|b] matrices. - auto update = [&log_normalizers]( - const Assignment &assignment, - const GaussianMixtureFactor::sharedFactor &gf) { - auto jf = std::dynamic_pointer_cast(gf); - if (!jf) return gf; - // If there is no noise model, there is nothing to do. - if (!jf->get_model()) return gf; - // If the log_normalizer is 0, do nothing - if (log_normalizers(assignment) == 0.0) return gf; - - GaussianFactorGraph gfg; - gfg.push_back(jf); - - Vector c(1); - c << std::sqrt(log_normalizers(assignment)); - auto constantFactor = std::make_shared(c); - - gfg.push_back(constantFactor); - return std::dynamic_pointer_cast( - std::make_shared(gfg)); - }; - return factors.apply(update); -} - /* *******************************************************************************/ GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const Factors &factors, - bool varyingNormalizers) - : Base(continuousKeys, discreteKeys), - factors_(correct(factors, varyingNormalizers)) {} + const Factors &factors) + : Base(continuousKeys, discreteKeys), factors_(factors) {} /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { @@ -129,9 +54,7 @@ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { /* *******************************************************************************/ void GaussianMixtureFactor::print(const std::string &s, const KeyFormatter &formatter) const { - std::cout << (s.empty() ? 
"" : s + "\n"); - std::cout << "GaussianMixtureFactor" << std::endl; - HybridFactor::print("", formatter); + HybridFactor::print(s, formatter); std::cout << "{\n"; if (factors_.empty()) { std::cout << " empty" << std::endl; diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 588501bbe..67d12ddb0 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -82,13 +82,10 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * their cardinalities. * @param factors The decision tree of Gaussian factors stored as the mixture * density. - * @param varyingNormalizers Flag indicating factor components have varying - * normalizer values. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const Factors &factors, - bool varyingNormalizers = false); + const Factors &factors); /** * @brief Construct a new GaussianMixtureFactor object using a vector of @@ -97,16 +94,12 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of gaussian factor shared pointers. - * @param varyingNormalizers Flag indicating factor components have varying - * normalizer values. 
*/ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const std::vector &factors, - bool varyingNormalizers = false) + const std::vector &factors) : GaussianMixtureFactor(continuousKeys, discreteKeys, - Factors(discreteKeys, factors), - varyingNormalizers) {} + Factors(discreteKeys, factors)) {} /// @} /// @name Testable @@ -114,8 +107,9 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { bool equals(const HybridFactor &lf, double tol = 1e-9) const override; - void print(const std::string &s = "", const KeyFormatter &formatter = - DefaultKeyFormatter) const override; + void print( + const std::string &s = "GaussianMixtureFactor\n", + const KeyFormatter &formatter = DefaultKeyFormatter) const override; /// @} /// @name Standard API diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 1d01baed2..bb0f23686 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -220,16 +220,16 @@ GaussianBayesNet HybridBayesNet::choose( /* ************************************************************************* */ HybridValues HybridBayesNet::optimize() const { // Collect all the discrete factors to compute MPE - DiscreteFactorGraph discrete_fg; + DiscreteBayesNet discrete_bn; for (auto &&conditional : *this) { if (conditional->isDiscrete()) { - discrete_fg.push_back(conditional->asDiscrete()); + discrete_bn.push_back(conditional->asDiscrete()); } } // Solve for the MPE - DiscreteValues mpe = discrete_fg.optimize(); + DiscreteValues mpe = DiscreteFactorGraph(discrete_bn).optimize(); // Given the MPE, compute the optimal continuous values. 
return HybridValues(optimize(mpe), mpe); diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index 418489d66..afd1c8032 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -13,7 +13,6 @@ * @file HybridFactor.h * @date Mar 11, 2022 * @author Fan Jiang - * @author Varun Agrawal */ #pragma once @@ -34,8 +33,6 @@ class HybridValues; /// Alias for DecisionTree of GaussianFactorGraphs using GaussianFactorGraphTree = DecisionTree; -/// Alias for DecisionTree of GaussianBayesNets -using GaussianBayesNetTree = DecisionTree; KeyVector CollectKeys(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys); diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index f05dfd423..b764dc9e0 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -279,37 +279,21 @@ GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) { using Result = std::pair, GaussianMixtureFactor::sharedFactor>; -/** - * Compute the probability q(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m) - * from the residual error at the mean μ. - * The residual error contains no keys, and only - * depends on the discrete separator if present. - */ +// Integrate the probability mass in the last continuous conditional using +// the unnormalized probability q(μ;m) = exp(-error(μ;m)) at the mean. +// discrete_probability = exp(-error(μ;m)) * sqrt(det(2π Σ_m)) static std::shared_ptr createDiscreteFactor( const DecisionTree &eliminationResults, const DiscreteKeys &discreteSeparator) { - auto logProbability = [&](const Result &pair) -> double { + auto probability = [&](const Result &pair) -> double { const auto &[conditional, factor] = pair; static const VectorValues kEmpty; // If the factor is not null, it has no keys, just contains the residual. if (!factor) return 1.0; // TODO(dellaert): not loving this. 
- - // Logspace version of: - // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); - // We take negative of the logNormalizationConstant `log(1/k)` - // to get `log(k)`. - return -factor->error(kEmpty) + (-conditional->logNormalizationConstant()); + return exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); }; - AlgebraicDecisionTree logProbabilities( - DecisionTree(eliminationResults, logProbability)); - - // Perform normalization - double max_log = logProbabilities.max(); - AlgebraicDecisionTree probabilities = DecisionTree( - logProbabilities, - [&max_log](const double x) { return exp(x - max_log); }); - probabilities = probabilities.normalize(probabilities.sum()); + DecisionTree probabilities(eliminationResults, probability); return std::make_shared(discreteSeparator, probabilities); } @@ -370,12 +354,6 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // Perform elimination! DecisionTree eliminationResults(factorGraphTree, eliminate); - // Create the GaussianMixture from the conditionals - GaussianMixture::Conditionals conditionals( - eliminationResults, [](const Result &pair) { return pair.first; }); - auto gaussianMixture = std::make_shared( - frontalKeys, continuousSeparator, discreteSeparator, conditionals); - // If there are no more continuous parents we create a DiscreteFactor with the // error for each discrete choice. Otherwise, create a GaussianMixtureFactor // on the separator, taking care to correct for conditional constants. 
@@ -385,6 +363,12 @@ hybridElimination(const HybridGaussianFactorGraph &factors, : createGaussianMixtureFactor(eliminationResults, continuousSeparator, discreteSeparator); + // Create the GaussianMixture from the conditionals + GaussianMixture::Conditionals conditionals( + eliminationResults, [](const Result &pair) { return pair.first; }); + auto gaussianMixture = std::make_shared( + frontalKeys, continuousSeparator, discreteSeparator, conditionals); + return {std::make_shared(gaussianMixture), newFactor}; } diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 47b9ddc99..9cc7e6bfd 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -22,13 +22,9 @@ #include #include #include -#include -#include #include #include #include -#include -#include // Include for test suite #include @@ -60,6 +56,7 @@ TEST(GaussianMixtureFactor, Sum) { auto b = Matrix::Zero(2, 1); Vector2 sigmas; sigmas << 1, 2; + auto model = noiseModel::Diagonal::Sigmas(sigmas, true); auto f10 = std::make_shared(X(1), A1, X(2), A2, b); auto f11 = std::make_shared(X(1), A1, X(2), A2, b); @@ -109,8 +106,7 @@ TEST(GaussianMixtureFactor, Printing) { GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); std::string expected = - R"(GaussianMixtureFactor -Hybrid [x1 x2; 1]{ + R"(Hybrid [x1 x2; 1]{ Choice(1) 0 Leaf : A[x1] = [ @@ -182,8 +178,7 @@ TEST(GaussianMixtureFactor, Error) { continuousValues.insert(X(2), Vector2(1, 1)); // error should return a tree of errors, with nodes for each discrete value. 
- AlgebraicDecisionTree error_tree = - mixtureFactor.errorTree(continuousValues); + AlgebraicDecisionTree error_tree = mixtureFactor.errorTree(continuousValues); std::vector discrete_keys = {m1}; // Error values for regression test @@ -196,240 +191,8 @@ TEST(GaussianMixtureFactor, Error) { DiscreteValues discreteValues; discreteValues[m1.first] = 1; EXPECT_DOUBLES_EQUAL( - 4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9); -} - -/* ************************************************************************* */ -// Test components with differing means -TEST(GaussianMixtureFactor, DifferentMeans) { - DiscreteKey m1(M(1), 2), m2(M(2), 2); - - Values values; - double x1 = 0.0, x2 = 1.75, x3 = 2.60; - values.insert(X(1), x1); - values.insert(X(2), x2); - values.insert(X(3), x3); - - auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0); - - auto f0 = std::make_shared>(X(1), X(2), 0.0, model0) - ->linearize(values); - auto f1 = std::make_shared>(X(1), X(2), 2.0, model1) - ->linearize(values); - std::vector factors{f0, f1}; - - GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors, true); - HybridGaussianFactorGraph hfg; - hfg.push_back(mixtureFactor); - - f0 = std::make_shared>(X(2), X(3), 0.0, model0) - ->linearize(values); - f1 = std::make_shared>(X(2), X(3), 2.0, model1) - ->linearize(values); - std::vector factors23{f0, f1}; - hfg.push_back(GaussianMixtureFactor({X(2), X(3)}, {m2}, factors23, true)); - - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - hfg.push_back(prior); - - hfg.push_back(PriorFactor(X(2), 2.0, prior_noise).linearize(values)); - - auto bn = hfg.eliminateSequential(); - HybridValues actual = bn->optimize(); - - HybridValues expected( - VectorValues{ - {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}}, - DiscreteValues{{M(1), 1}, {M(2), 0}}); - - EXPECT(assert_equal(expected, 
actual)); - - { - DiscreteValues dv{{M(1), 0}, {M(2), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 0}, {M(2), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}, {M(2), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}, {M(2), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); - } -} - -/* ************************************************************************* */ -/** - * @brief Test components with differing covariances. 
- * The factor graph is - * *-X1-*-X2 - * | - * M1 - */ -TEST(GaussianMixtureFactor, DifferentCovariances) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 1.0, x2 = 1.0; - values.insert(X(1), x1); - values.insert(X(2), x2); - - double between = 0.0; - - auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - - auto f0 = - std::make_shared>(X(1), X(2), between, model0); - auto f1 = - std::make_shared>(X(1), X(2), between, model1); - std::vector factors{f0, f1}; - - // Create via toFactorGraph - using symbol_shorthand::Z; - Matrix H0_1, H0_2, H1_1, H1_2; - Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); - std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H0_1 /*Sp1*/}, - {X(2), H0_2 /*Tp2*/}}; - - Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); - std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H1_1 /*Sp1*/}, - {X(2), H1_2 /*Tp2*/}}; - gtsam::GaussianMixtureFactor gmf( - {X(1), X(2)}, {m1}, - {std::make_shared(X(1), H0_1, X(2), H0_2, -d0, model0), - std::make_shared(X(1), H1_1, X(2), H1_2, -d1, model1)}, - true); - - // Create FG with single GaussianMixtureFactor - HybridGaussianFactorGraph mixture_fg; - mixture_fg.add(gmf); - - // Linearized prior factor on X1 - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - mixture_fg.push_back(prior); - - auto hbn = mixture_fg.eliminateSequential(); - // hbn->print(); - - VectorValues cv; - cv.insert(X(1), Vector1(0.0)); - cv.insert(X(2), Vector1(0.0)); - - // Check that the error values at the MLE point μ. 
- AlgebraicDecisionTree errorTree = hbn->errorTree(cv); - - DiscreteValues dv0{{M(1), 0}}; - DiscreteValues dv1{{M(1), 1}}; - - // regression - EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); - EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); - - DiscreteConditional expected_m1(m1, "0.5/0.5"); - DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); - - EXPECT(assert_equal(expected_m1, actual_m1)); -} - -/* ************************************************************************* */ -/** - * @brief Test components with differing covariances - * but with a Bayes net P(Z|X, M) converted to a FG. - */ -TEST(GaussianMixtureFactor, DifferentCovariances2) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 1.0, x2 = 1.0; - values.insert(X(1), x1); - values.insert(X(2), x2); - - double between = 0.0; - - auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - - auto f0 = - std::make_shared>(X(1), X(2), between, model0); - auto f1 = - std::make_shared>(X(1), X(2), between, model1); - std::vector factors{f0, f1}; - - // Create via toFactorGraph - using symbol_shorthand::Z; - Matrix H0_1, H0_2, H1_1, H1_2; - Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); - std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H0_1 /*Sp1*/}, - {X(2), H0_2 /*Tp2*/}}; - - Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); - std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H1_1 /*Sp1*/}, - {X(2), H1_2 /*Tp2*/}}; - auto gm = new gtsam::GaussianMixture( - {Z(1)}, {X(1), X(2)}, {m1}, - {std::make_shared(terms0, 1, -d0, model0), - std::make_shared(terms1, 1, -d1, model1)}); - gtsam::HybridBayesNet bn; - bn.emplace_back(gm); - - gtsam::VectorValues measurements; - measurements.insert(Z(1), gtsam::Z_1x1); - // Create FG with single GaussianMixtureFactor - HybridGaussianFactorGraph mixture_fg = 
bn.toFactorGraph(measurements); - - // Linearized prior factor on X1 - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - mixture_fg.push_back(prior); - - auto hbn = mixture_fg.eliminateSequential(); - - VectorValues cv; - cv.insert(X(1), Vector1(0.0)); - cv.insert(X(2), Vector1(0.0)); - - // Check that the error values at the MLE point μ. - AlgebraicDecisionTree errorTree = hbn->errorTree(cv); - - DiscreteValues dv0{{M(1), 0}}; - DiscreteValues dv1{{M(1), 1}}; - - // regression - EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); - EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); - - DiscreteConditional expected_m1(m1, "0.5/0.5"); - DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); - - EXPECT(assert_equal(expected_m1, actual_m1)); + 4.0, mixtureFactor.error({continuousValues, discreteValues}), + 1e-9); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 2d851b0ff..cf56e35d6 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -510,7 +510,6 @@ factor 0: b = [ -10 ] No noise model factor 1: -GaussianMixtureFactor Hybrid [x0 x1; m0]{ Choice(m0) 0 Leaf : @@ -535,7 +534,6 @@ Hybrid [x0 x1; m0]{ } factor 2: -GaussianMixtureFactor Hybrid [x1 x2; m1]{ Choice(m1) 0 Leaf : @@ -677,26 +675,22 @@ factor 6: P( m1 | m0 ): size: 3 conditional 0: Hybrid P( x0 | x1 m0) Discrete Keys = (m0, 2), - logNormalizationConstant: 1.38862 Choice(m0) 0 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.85087 ] - logNormalizationConstant: 1.38862 No noise model 1 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.95037 ] - logNormalizationConstant: 1.38862 No noise model conditional 1: Hybrid P( x1 | x2 m0 m1) Discrete Keys = (m0, 2), (m1, 2), - logNormalizationConstant: 1.3935 Choice(m1) 
0 Choice(m0) @@ -704,14 +698,12 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.99901 ] - logNormalizationConstant: 1.3935 No noise model 0 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.90098 ] - logNormalizationConstant: 1.3935 No noise model 1 Choice(m0) @@ -719,19 +711,16 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10.098 ] - logNormalizationConstant: 1.3935 No noise model 1 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10 ] - logNormalizationConstant: 1.3935 No noise model conditional 2: Hybrid P( x2 | m0 m1) Discrete Keys = (m0, 2), (m1, 2), - logNormalizationConstant: 1.38857 Choice(m1) 0 Choice(m0) @@ -740,7 +729,6 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.1489 ] mean: 1 elements x2: -1.0099 - logNormalizationConstant: 1.38857 No noise model 0 1 Leaf p(x2) @@ -748,7 +736,6 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.1479 ] mean: 1 elements x2: -1.0098 - logNormalizationConstant: 1.38857 No noise model 1 Choice(m0) @@ -757,7 +744,6 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.0504 ] mean: 1 elements x2: -1.0001 - logNormalizationConstant: 1.38857 No noise model 1 1 Leaf p(x2) @@ -765,7 +751,6 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.0494 ] mean: 1 elements x2: -1 - logNormalizationConstant: 1.38857 No noise model )"; diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index 58a12b3d8..0b2564403 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -18,9 +18,6 @@ #include #include -#include -#include -#include #include #include #include @@ -118,152 +115,6 @@ TEST(MixtureFactor, Dim) { EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); } -/* ************************************************************************* */ -// Test components with differing means -TEST(MixtureFactor, DifferentMeans) { - DiscreteKey m1(M(1), 2), 
m2(M(2), 2); - - Values values; - double x1 = 0.0, x2 = 1.75, x3 = 2.60; - values.insert(X(1), x1); - values.insert(X(2), x2); - values.insert(X(3), x3); - - auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0); - - auto f0 = std::make_shared>(X(1), X(2), 0.0, model0); - auto f1 = std::make_shared>(X(1), X(2), 2.0, model1); - std::vector factors{f0, f1}; - - MixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); - HybridNonlinearFactorGraph hnfg; - hnfg.push_back(mixtureFactor); - - f0 = std::make_shared>(X(2), X(3), 0.0, model0); - f1 = std::make_shared>(X(2), X(3), 2.0, model1); - std::vector factors23{f0, f1}; - hnfg.push_back(MixtureFactor({X(2), X(3)}, {m2}, factors23)); - - auto prior = PriorFactor(X(1), x1, prior_noise); - hnfg.push_back(prior); - - hnfg.emplace_shared>(X(2), 2.0, prior_noise); - - auto hgfg = hnfg.linearize(values); - auto bn = hgfg->eliminateSequential(); - HybridValues actual = bn->optimize(); - - HybridValues expected( - VectorValues{ - {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}}, - DiscreteValues{{M(1), 1}, {M(2), 0}}); - - EXPECT(assert_equal(expected, actual)); - - { - DiscreteValues dv{{M(1), 0}, {M(2), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 0}, {M(2), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}, {M(2), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}, {M(2), 1}}; - VectorValues cont = bn->optimize(dv); - double error = 
bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); - } -} - -/* ************************************************************************* */ -// Test components with differing covariances -TEST(MixtureFactor, DifferentCovariances) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 1.0, x2 = 1.0; - values.insert(X(1), x1); - values.insert(X(2), x2); - - double between = 0.0; - - auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - - auto f0 = - std::make_shared>(X(1), X(2), between, model0); - auto f1 = - std::make_shared>(X(1), X(2), between, model1); - std::vector factors{f0, f1}; - - // Create via toFactorGraph - using symbol_shorthand::Z; - Matrix H0_1, H0_2, H1_1, H1_2; - Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); - std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H0_1 /*Sp1*/}, - {X(2), H0_2 /*Tp2*/}}; - - Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); - std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H1_1 /*Sp1*/}, - {X(2), H1_2 /*Tp2*/}}; - auto gm = new gtsam::GaussianMixture( - {Z(1)}, {X(1), X(2)}, {m1}, - {std::make_shared(terms0, 1, -d0, model0), - std::make_shared(terms1, 1, -d1, model1)}); - gtsam::HybridBayesNet bn; - bn.emplace_back(gm); - - gtsam::VectorValues measurements; - measurements.insert(Z(1), gtsam::Z_1x1); - // Create FG with single GaussianMixtureFactor - HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); - - // Linearized prior factor on X1 - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - mixture_fg.push_back(prior); - - auto hbn = mixture_fg.eliminateSequential(); - - VectorValues cv; - cv.insert(X(1), Vector1(0.0)); - cv.insert(X(2), Vector1(0.0)); - - // Check that we get different error values at the MLE point μ. 
- AlgebraicDecisionTree errorTree = hbn->errorTree(cv); - - HybridValues hv0(cv, DiscreteValues{{M(1), 0}}); - HybridValues hv1(cv, DiscreteValues{{M(1), 1}}); - - auto cond0 = hbn->at(0)->asMixture(); - auto cond1 = hbn->at(1)->asMixture(); - auto discrete_cond = hbn->at(2)->asDiscrete(); - AlgebraicDecisionTree expectedErrorTree(m1, 9.90348755254, - 0.69314718056); - EXPECT(assert_equal(expectedErrorTree, errorTree)); -} - /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/linear/GaussianBayesNet.cpp b/gtsam/linear/GaussianBayesNet.cpp index 861e19cc9..b04878ac5 100644 --- a/gtsam/linear/GaussianBayesNet.cpp +++ b/gtsam/linear/GaussianBayesNet.cpp @@ -243,25 +243,5 @@ namespace gtsam { } /* ************************************************************************* */ - double GaussianBayesNet::logNormalizationConstant() const { - /* - normalization constant = 1.0 / sqrt((2*pi)^n*det(Sigma)) - logConstant = -0.5 * n*log(2*pi) - 0.5 * log det(Sigma) - - log det(Sigma)) = -2.0 * logDeterminant() - thus, logConstant = -0.5*n*log(2*pi) + logDeterminant() - - BayesNet logConstant = sum(-0.5*n_i*log(2*pi) + logDeterminant_i()) - = sum(-0.5*n_i*log(2*pi)) + sum(logDeterminant_i()) - = sum(-0.5*n_i*log(2*pi)) + bn->logDeterminant() - */ - double logNormConst = 0.0; - for (const sharedConditional& cg : *this) { - logNormConst += cg->logNormalizationConstant(); - } - return logNormConst; - } - - /* ************************************************************************* */ } // namespace gtsam diff --git a/gtsam/linear/GaussianBayesNet.h b/gtsam/linear/GaussianBayesNet.h index ea1cb8603..19781d1e6 100644 --- a/gtsam/linear/GaussianBayesNet.h +++ b/gtsam/linear/GaussianBayesNet.h @@ -82,12 +82,6 @@ namespace gtsam { /** Check equality */ bool equals(const This& bn, double tol = 1e-9) const; - /// Check exact equality. 
- friend bool operator==(const GaussianBayesNet& lhs, - const GaussianBayesNet& rhs) { - return lhs.isEqual(rhs); - } - /// print graph void print( const std::string& s = "", @@ -234,14 +228,6 @@ namespace gtsam { * @return The determinant */ double logDeterminant() const; - /** - * @brief Get the log of the normalization constant corresponding to the - * joint Gaussian density represented by this Bayes net. - * - * @return double - */ - double logNormalizationConstant() const; - /** * Backsubstitute with a different RHS vector than the one stored in this BayesNet. * gy=inv(R*inv(Sigma))*gx diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index f986eed02..0112835aa 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -121,7 +121,6 @@ namespace gtsam { const auto mean = solve({}); // solve for mean. mean.print(" mean", formatter); } - cout << " logNormalizationConstant: " << logNormalizationConstant() << std::endl; if (model_) model_->print(" Noise model: "); else @@ -185,13 +184,8 @@ namespace gtsam { double GaussianConditional::logNormalizationConstant() const { constexpr double log2pi = 1.8378770664093454835606594728112; size_t n = d().size(); - // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1} - // log det(Sigma) = -log(det(R'R)) = -2*log(det(R)) - // Hence, log det(Sigma)) = -2.0 * logDeterminant() - // which gives log = -0.5*n*log(2*pi) - 0.5*(-2.0 * logDeterminant()) - // = -0.5*n*log(2*pi) + (0.5*2.0 * logDeterminant()) - // = -0.5*n*log(2*pi) + logDeterminant() - return -0.5 * n * log2pi + logDeterminant(); + // log det(Sigma)) = - 2.0 * logDeterminant() + return - 0.5 * n * log2pi + logDeterminant(); } /* ************************************************************************* */ diff --git a/gtsam/linear/VectorValues.h b/gtsam/linear/VectorValues.h index 99ee4eb83..7fbd43ffc 100644 --- a/gtsam/linear/VectorValues.h +++ b/gtsam/linear/VectorValues.h @@ 
-263,11 +263,6 @@ namespace gtsam { /** equals required by Testable for unit testing */ bool equals(const VectorValues& x, double tol = 1e-9) const; - /// Check exact equality. - friend bool operator==(const VectorValues& lhs, const VectorValues& rhs) { - return lhs.equals(rhs); - } - /// @{ /// @name Advanced Interface /// @{ diff --git a/gtsam/linear/linear.i b/gtsam/linear/linear.i index 3a629f349..4b0963b8d 100644 --- a/gtsam/linear/linear.i +++ b/gtsam/linear/linear.i @@ -510,17 +510,12 @@ virtual class GaussianConditional : gtsam::JacobianFactor { GaussianConditional(size_t key, gtsam::Vector d, gtsam::Matrix R, size_t name1, gtsam::Matrix S, size_t name2, gtsam::Matrix T, const gtsam::noiseModel::Diagonal* sigmas); - GaussianConditional(const vector> terms, - size_t nrFrontals, gtsam::Vector d, - const gtsam::noiseModel::Diagonal* sigmas); // Constructors with no noise model GaussianConditional(size_t key, gtsam::Vector d, gtsam::Matrix R); GaussianConditional(size_t key, gtsam::Vector d, gtsam::Matrix R, size_t name1, gtsam::Matrix S); GaussianConditional(size_t key, gtsam::Vector d, gtsam::Matrix R, size_t name1, gtsam::Matrix S, size_t name2, gtsam::Matrix T); - GaussianConditional(const gtsam::KeyVector& keys, size_t nrFrontals, - const gtsam::VerticalBlockMatrix& augmentedMatrix); // Named constructors static gtsam::GaussianConditional FromMeanAndStddev(gtsam::Key key, diff --git a/gtsam/linear/tests/testGaussianConditional.cpp b/gtsam/linear/tests/testGaussianConditional.cpp index dcd821889..a4a722012 100644 --- a/gtsam/linear/tests/testGaussianConditional.cpp +++ b/gtsam/linear/tests/testGaussianConditional.cpp @@ -516,7 +516,6 @@ TEST(GaussianConditional, Print) { " d = [ 20 40 ]\n" " mean: 1 elements\n" " x0: 20 40\n" - " logNormalizationConstant: -4.0351\n" "isotropic dim=2 sigma=3\n"; EXPECT(assert_print_equal(expected, conditional, "GaussianConditional")); @@ -531,7 +530,6 @@ TEST(GaussianConditional, Print) { " S[x1] = [ -1 -2 ]\n" " [ -3 -4 
]\n" " d = [ 20 40 ]\n" - " logNormalizationConstant: -4.0351\n" "isotropic dim=2 sigma=3\n"; EXPECT(assert_print_equal(expected1, conditional1, "GaussianConditional")); @@ -547,7 +545,6 @@ TEST(GaussianConditional, Print) { " S[y1] = [ -5 -6 ]\n" " [ -7 -8 ]\n" " d = [ 20 40 ]\n" - " logNormalizationConstant: -4.0351\n" "isotropic dim=2 sigma=3\n"; EXPECT(assert_print_equal(expected2, conditional2, "GaussianConditional")); } From cea84b8c8945249f57905b3162d9e7cd7369d83f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 15:50:39 -0400 Subject: [PATCH 074/398] reduce the diff even more --- gtsam/hybrid/GaussianMixture.cpp | 2 -- gtsam/hybrid/HybridBayesNet.cpp | 1 - gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp | 3 --- 3 files changed, 6 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 531a9f2e5..c105a329e 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -145,8 +145,6 @@ void GaussianMixture::print(const std::string &s, std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), "; } std::cout << "\n"; - std::cout << " logNormalizationConstant: " << logConstant_ << "\n" - << std::endl; conditionals_.print( "", [&](Key k) { return formatter(k); }, [&](const GaussianConditional::shared_ptr &gf) -> std::string { diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index bb0f23686..b02967555 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -221,7 +221,6 @@ GaussianBayesNet HybridBayesNet::choose( HybridValues HybridBayesNet::optimize() const { // Collect all the discrete factors to compute MPE DiscreteBayesNet discrete_bn; - for (auto &&conditional : *this) { if (conditional->isDiscrete()) { discrete_bn.push_back(conditional->asDiscrete()); diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 
cf56e35d6..93081d309 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -675,7 +675,6 @@ factor 6: P( m1 | m0 ): size: 3 conditional 0: Hybrid P( x0 | x1 m0) Discrete Keys = (m0, 2), - Choice(m0) 0 Leaf p(x0 | x1) R = [ 10.0499 ] @@ -691,7 +690,6 @@ conditional 0: Hybrid P( x0 | x1 m0) conditional 1: Hybrid P( x1 | x2 m0 m1) Discrete Keys = (m0, 2), (m1, 2), - Choice(m1) 0 Choice(m0) 0 0 Leaf p(x1 | x2) @@ -721,7 +719,6 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) conditional 2: Hybrid P( x2 | m0 m1) Discrete Keys = (m0, 2), (m1, 2), - Choice(m1) 0 Choice(m0) 0 0 Leaf p(x2) From 7269d80b1c1077921944754487783a29e8fc354e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 15:57:36 -0400 Subject: [PATCH 075/398] improvements to GaussianConditional and GaussianBayesNet --- gtsam/linear/GaussianBayesNet.cpp | 20 +++++++++++++++++++ gtsam/linear/GaussianBayesNet.h | 14 +++++++++++++ gtsam/linear/GaussianConditional.cpp | 10 ++++++++-- gtsam/linear/VectorValues.h | 5 +++++ gtsam/linear/linear.i | 5 +++++ .../linear/tests/testGaussianConditional.cpp | 3 +++ 6 files changed, 55 insertions(+), 2 deletions(-) diff --git a/gtsam/linear/GaussianBayesNet.cpp b/gtsam/linear/GaussianBayesNet.cpp index b04878ac5..861e19cc9 100644 --- a/gtsam/linear/GaussianBayesNet.cpp +++ b/gtsam/linear/GaussianBayesNet.cpp @@ -243,5 +243,25 @@ namespace gtsam { } /* ************************************************************************* */ + double GaussianBayesNet::logNormalizationConstant() const { + /* + normalization constant = 1.0 / sqrt((2*pi)^n*det(Sigma)) + logConstant = -0.5 * n*log(2*pi) - 0.5 * log det(Sigma) + + log det(Sigma)) = -2.0 * logDeterminant() + thus, logConstant = -0.5*n*log(2*pi) + logDeterminant() + + BayesNet logConstant = sum(-0.5*n_i*log(2*pi) + logDeterminant_i()) + = sum(-0.5*n_i*log(2*pi)) + sum(logDeterminant_i()) + = sum(-0.5*n_i*log(2*pi)) + bn->logDeterminant() + */ + 
double logNormConst = 0.0; + for (const sharedConditional& cg : *this) { + logNormConst += cg->logNormalizationConstant(); + } + return logNormConst; + } + + /* ************************************************************************* */ } // namespace gtsam diff --git a/gtsam/linear/GaussianBayesNet.h b/gtsam/linear/GaussianBayesNet.h index 19781d1e6..ea1cb8603 100644 --- a/gtsam/linear/GaussianBayesNet.h +++ b/gtsam/linear/GaussianBayesNet.h @@ -82,6 +82,12 @@ namespace gtsam { /** Check equality */ bool equals(const This& bn, double tol = 1e-9) const; + /// Check exact equality. + friend bool operator==(const GaussianBayesNet& lhs, + const GaussianBayesNet& rhs) { + return lhs.isEqual(rhs); + } + /// print graph void print( const std::string& s = "", @@ -228,6 +234,14 @@ namespace gtsam { * @return The determinant */ double logDeterminant() const; + /** + * @brief Get the log of the normalization constant corresponding to the + * joint Gaussian density represented by this Bayes net. + * + * @return double + */ + double logNormalizationConstant() const; + /** * Backsubstitute with a different RHS vector than the one stored in this BayesNet. * gy=inv(R*inv(Sigma))*gx diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index 0112835aa..f986eed02 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -121,6 +121,7 @@ namespace gtsam { const auto mean = solve({}); // solve for mean. 
mean.print(" mean", formatter); } + cout << " logNormalizationConstant: " << logNormalizationConstant() << std::endl; if (model_) model_->print(" Noise model: "); else @@ -184,8 +185,13 @@ namespace gtsam { double GaussianConditional::logNormalizationConstant() const { constexpr double log2pi = 1.8378770664093454835606594728112; size_t n = d().size(); - // log det(Sigma)) = - 2.0 * logDeterminant() - return - 0.5 * n * log2pi + logDeterminant(); + // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1} + // log det(Sigma) = -log(det(R'R)) = -2*log(det(R)) + // Hence, log det(Sigma)) = -2.0 * logDeterminant() + // which gives log = -0.5*n*log(2*pi) - 0.5*(-2.0 * logDeterminant()) + // = -0.5*n*log(2*pi) + (0.5*2.0 * logDeterminant()) + // = -0.5*n*log(2*pi) + logDeterminant() + return -0.5 * n * log2pi + logDeterminant(); } /* ************************************************************************* */ diff --git a/gtsam/linear/VectorValues.h b/gtsam/linear/VectorValues.h index 7fbd43ffc..99ee4eb83 100644 --- a/gtsam/linear/VectorValues.h +++ b/gtsam/linear/VectorValues.h @@ -263,6 +263,11 @@ namespace gtsam { /** equals required by Testable for unit testing */ bool equals(const VectorValues& x, double tol = 1e-9) const; + /// Check exact equality. 
+ friend bool operator==(const VectorValues& lhs, const VectorValues& rhs) { + return lhs.equals(rhs); + } + /// @{ /// @name Advanced Interface /// @{ diff --git a/gtsam/linear/linear.i b/gtsam/linear/linear.i index 4b0963b8d..3a629f349 100644 --- a/gtsam/linear/linear.i +++ b/gtsam/linear/linear.i @@ -510,12 +510,17 @@ virtual class GaussianConditional : gtsam::JacobianFactor { GaussianConditional(size_t key, gtsam::Vector d, gtsam::Matrix R, size_t name1, gtsam::Matrix S, size_t name2, gtsam::Matrix T, const gtsam::noiseModel::Diagonal* sigmas); + GaussianConditional(const vector> terms, + size_t nrFrontals, gtsam::Vector d, + const gtsam::noiseModel::Diagonal* sigmas); // Constructors with no noise model GaussianConditional(size_t key, gtsam::Vector d, gtsam::Matrix R); GaussianConditional(size_t key, gtsam::Vector d, gtsam::Matrix R, size_t name1, gtsam::Matrix S); GaussianConditional(size_t key, gtsam::Vector d, gtsam::Matrix R, size_t name1, gtsam::Matrix S, size_t name2, gtsam::Matrix T); + GaussianConditional(const gtsam::KeyVector& keys, size_t nrFrontals, + const gtsam::VerticalBlockMatrix& augmentedMatrix); // Named constructors static gtsam::GaussianConditional FromMeanAndStddev(gtsam::Key key, diff --git a/gtsam/linear/tests/testGaussianConditional.cpp b/gtsam/linear/tests/testGaussianConditional.cpp index a4a722012..dcd821889 100644 --- a/gtsam/linear/tests/testGaussianConditional.cpp +++ b/gtsam/linear/tests/testGaussianConditional.cpp @@ -516,6 +516,7 @@ TEST(GaussianConditional, Print) { " d = [ 20 40 ]\n" " mean: 1 elements\n" " x0: 20 40\n" + " logNormalizationConstant: -4.0351\n" "isotropic dim=2 sigma=3\n"; EXPECT(assert_print_equal(expected, conditional, "GaussianConditional")); @@ -530,6 +531,7 @@ TEST(GaussianConditional, Print) { " S[x1] = [ -1 -2 ]\n" " [ -3 -4 ]\n" " d = [ 20 40 ]\n" + " logNormalizationConstant: -4.0351\n" "isotropic dim=2 sigma=3\n"; EXPECT(assert_print_equal(expected1, conditional1, "GaussianConditional")); @@ -545,6 
+547,7 @@ TEST(GaussianConditional, Print) { " S[y1] = [ -5 -6 ]\n" " [ -7 -8 ]\n" " d = [ 20 40 ]\n" + " logNormalizationConstant: -4.0351\n" "isotropic dim=2 sigma=3\n"; EXPECT(assert_print_equal(expected2, conditional2, "GaussianConditional")); } From 7fab3f8cc3859f5a693b5f017e9cc9aaa7eda735 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:21:46 -0400 Subject: [PATCH 076/398] improved MixtureFactor tests --- gtsam/hybrid/tests/testMixtureFactor.cpp | 153 +++++++++++++++++++++++ 1 file changed, 153 insertions(+) diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index 0b2564403..48b193eeb 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -18,6 +18,9 @@ #include #include +#include +#include +#include #include #include #include @@ -115,6 +118,156 @@ TEST(MixtureFactor, Dim) { EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); } +/* ************************************************************************* */ +// Test components with differing means +TEST(MixtureFactor, DifferentMeans) { + DiscreteKey m1(M(1), 2), m2(M(2), 2); + + Values values; + double x1 = 0.0, x2 = 1.75, x3 = 2.60; + values.insert(X(1), x1); + values.insert(X(2), x2); + values.insert(X(3), x3); + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0); + + auto f0 = std::make_shared>(X(1), X(2), 0.0, model0); + auto f1 = std::make_shared>(X(1), X(2), 2.0, model1); + std::vector factors{f0, f1}; + + MixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + HybridNonlinearFactorGraph hnfg; + hnfg.push_back(mixtureFactor); + + f0 = std::make_shared>(X(2), X(3), 0.0, model0); + f1 = std::make_shared>(X(2), X(3), 2.0, model1); + std::vector factors23{f0, f1}; + hnfg.push_back(MixtureFactor({X(2), X(3)}, {m2}, factors23)); + + auto prior = PriorFactor(X(1), x1, prior_noise); + 
hnfg.push_back(prior); + + hnfg.emplace_shared>(X(2), 2.0, prior_noise); + + auto hgfg = hnfg.linearize(values); + auto bn = hgfg->eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{ + {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}}, + DiscreteValues{{M(1), 1}, {M(2), 0}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}, {M(2), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 0}, {M(2), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}, {M(2), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}, {M(2), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + } +} + +/* ************************************************************************* */ +// Test components with differing covariances +TEST(MixtureFactor, DifferentCovariances) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 1.0, x2 = 1.0; + values.insert(X(1), x1); + values.insert(X(2), x2); + + double between = 0.0; + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + auto f0 = + std::make_shared>(X(1), X(2), between, model0); + auto f1 = + std::make_shared>(X(1), X(2), between, model1); + std::vector factors{f0, f1}; + + // Create via toFactorGraph + using symbol_shorthand::Z; + Matrix H0_1, H0_2, H1_1, H1_2; + 
Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); + std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H0_1 /*Sp1*/}, + {X(2), H0_2 /*Tp2*/}}; + + Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); + std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H1_1 /*Sp1*/}, + {X(2), H1_2 /*Tp2*/}}; + auto gm = new gtsam::GaussianMixture( + {Z(1)}, {X(1), X(2)}, {m1}, + {std::make_shared(terms0, 1, -d0, model0), + std::make_shared(terms1, 1, -d1, model1)}); + gtsam::HybridBayesNet bn; + bn.emplace_back(gm); + + gtsam::VectorValues measurements; + measurements.insert(Z(1), gtsam::Z_1x1); + // Create FG with single GaussianMixtureFactor + HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); + + // Linearized prior factor on X1 + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + mixture_fg.push_back(prior); + + auto hbn = mixture_fg.eliminateSequential(); + + VectorValues cv; + cv.insert(X(1), Vector1(0.0)); + cv.insert(X(2), Vector1(0.0)); + // P(m1) = [0.5, 0.5], so we should pick 0 + DiscreteValues dv; + dv.insert({M(1), 0}); + HybridValues expected_values(cv, dv); + + HybridValues actual_values = hbn->optimize(); + EXPECT(assert_equal(expected_values, actual_values)); + + // Check that we get different error values at the MLE point μ. 
+ AlgebraicDecisionTree errorTree = hbn->errorTree(cv); + + HybridValues hv0(cv, DiscreteValues{{M(1), 0}}); + HybridValues hv1(cv, DiscreteValues{{M(1), 1}}); + + AlgebraicDecisionTree expectedErrorTree(m1, 9.90348755254, + 0.69314718056); + EXPECT(assert_equal(expectedErrorTree, errorTree)); +} + /* ************************************************************************* */ int main() { TestResult tr; From ccad6f6c48febbabfd989a619313725fc37f2a49 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:22:07 -0400 Subject: [PATCH 077/398] print main logNormalizationConstant in GaussianMixture --- gtsam/hybrid/GaussianMixture.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index c105a329e..531a9f2e5 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -145,6 +145,8 @@ void GaussianMixture::print(const std::string &s, std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), "; } std::cout << "\n"; + std::cout << " logNormalizationConstant: " << logConstant_ << "\n" + << std::endl; conditionals_.print( "", [&](Key k) { return formatter(k); }, [&](const GaussianConditional::shared_ptr &gf) -> std::string { From 2007ef53de47f75a317509acf7fbd7828371898e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:22:34 -0400 Subject: [PATCH 078/398] use DiscreteFactorGraph directly --- gtsam/hybrid/HybridBayesNet.cpp | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index b02967555..1d01baed2 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -220,15 +220,16 @@ GaussianBayesNet HybridBayesNet::choose( /* ************************************************************************* */ HybridValues HybridBayesNet::optimize() const { // Collect all the discrete factors to compute MPE - DiscreteBayesNet 
discrete_bn; + DiscreteFactorGraph discrete_fg; + for (auto &&conditional : *this) { if (conditional->isDiscrete()) { - discrete_bn.push_back(conditional->asDiscrete()); + discrete_fg.push_back(conditional->asDiscrete()); } } // Solve for the MPE - DiscreteValues mpe = DiscreteFactorGraph(discrete_bn).optimize(); + DiscreteValues mpe = discrete_fg.optimize(); // Given the MPE, compute the optimal continuous values. return HybridValues(optimize(mpe), mpe); From 598edfacce2d80229b178a2b0f1dd271a79512c3 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:28:28 -0400 Subject: [PATCH 079/398] improve GaussianMixture by checking for invalid conditionals and adding 2 new methods --- gtsam/hybrid/GaussianMixture.cpp | 67 ++++++++++++++++++++++++++++++-- gtsam/hybrid/GaussianMixture.h | 15 +++++++ gtsam/hybrid/HybridFactor.h | 3 ++ 3 files changed, 81 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 531a9f2e5..36a34226b 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -24,6 +24,7 @@ #include #include #include +#include #include namespace gtsam { @@ -92,6 +93,35 @@ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { return {conditionals_, wrap}; } +/* +*******************************************************************************/ +GaussianBayesNetTree GaussianMixture::add( + const GaussianBayesNetTree &sum) const { + using Y = GaussianBayesNet; + auto add = [](const Y &graph1, const Y &graph2) { + auto result = graph1; + if (graph2.size() == 0) { + return GaussianBayesNet(); + } + result.push_back(graph2); + return result; + }; + const auto tree = asGaussianBayesNetTree(); + return sum.empty() ? 
tree : sum.apply(tree, add); +} + +/* *******************************************************************************/ +GaussianBayesNetTree GaussianMixture::asGaussianBayesNetTree() const { + auto wrap = [](const GaussianConditional::shared_ptr &gc) { + if (gc) { + return GaussianBayesNet{gc}; + } else { + return GaussianBayesNet(); + } + }; + return {conditionals_, wrap}; +} + /* *******************************************************************************/ size_t GaussianMixture::nrComponents() const { size_t total = 0; @@ -318,8 +348,15 @@ AlgebraicDecisionTree GaussianMixture::logProbability( AlgebraicDecisionTree GaussianMixture::errorTree( const VectorValues &continuousValues) const { auto errorFunc = [&](const GaussianConditional::shared_ptr &conditional) { - return conditional->error(continuousValues) + // - logConstant_ - conditional->logNormalizationConstant(); + // Check if valid pointer + if (conditional) { + return conditional->error(continuousValues) + // + logConstant_ - conditional->logNormalizationConstant(); + } else { + // If not valid, pointer, it means this conditional was pruned, + // so we return maximum error. + return std::numeric_limits::max(); + } }; DecisionTree error_tree(conditionals_, errorFunc); return error_tree; @@ -327,10 +364,32 @@ AlgebraicDecisionTree GaussianMixture::errorTree( /* *******************************************************************************/ double GaussianMixture::error(const HybridValues &values) const { + // Check if discrete keys in discrete assignment are + // present in the GaussianMixture + KeyVector dKeys = this->discreteKeys_.indices(); + bool valid_assignment = false; + for (auto &&kv : values.discrete()) { + if (std::find(dKeys.begin(), dKeys.end(), kv.first) != dKeys.end()) { + valid_assignment = true; + break; + } + } + + // The discrete assignment is not valid so we return 0.0 erorr. 
+ if (!valid_assignment) { + return 0.0; + } + // Directly index to get the conditional, no need to build the whole tree. auto conditional = conditionals_(values.discrete()); - return conditional->error(values.continuous()) + // - logConstant_ - conditional->logNormalizationConstant(); + if (conditional) { + return conditional->error(values.continuous()) + // + logConstant_ - conditional->logNormalizationConstant(); + } else { + // If not valid, pointer, it means this conditional was pruned, + // so we return maximum error. + return std::numeric_limits::max(); + } } /* *******************************************************************************/ diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index c1ef504f8..bfa342dcf 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -72,6 +72,12 @@ class GTSAM_EXPORT GaussianMixture */ GaussianFactorGraphTree asGaussianFactorGraphTree() const; + /** + * @brief Convert a DecisionTree of conditionals into + * a DecisionTree of Gaussian Bayes nets. + */ + GaussianBayesNetTree asGaussianBayesNetTree() const; + /** * @brief Helper function to get the pruner functor. * @@ -250,6 +256,15 @@ class GTSAM_EXPORT GaussianMixture * @return GaussianFactorGraphTree */ GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const; + + /** + * @brief Merge the Gaussian Bayes Nets in `this` and `sum` while + * maintaining the decision tree structure. 
+ * + * @param sum Decision Tree of Gaussian Bayes Nets + * @return GaussianBayesNetTree + */ + GaussianBayesNetTree add(const GaussianBayesNetTree &sum) const; /// @} private: diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index afd1c8032..418489d66 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -13,6 +13,7 @@ * @file HybridFactor.h * @date Mar 11, 2022 * @author Fan Jiang + * @author Varun Agrawal */ #pragma once @@ -33,6 +34,8 @@ class HybridValues; /// Alias for DecisionTree of GaussianFactorGraphs using GaussianFactorGraphTree = DecisionTree; +/// Alias for DecisionTree of GaussianBayesNets +using GaussianBayesNetTree = DecisionTree; KeyVector CollectKeys(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys); From eef9765e4a16ca90f1bf3c953500e99c2ec4b2e2 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:29:02 -0400 Subject: [PATCH 080/398] normalize the discrete factor at the continuous-discrete boundary --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 28 +++++++++++++++++----- 1 file changed, 22 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index b764dc9e0..bf11a50fc 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -279,21 +279,37 @@ GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) { using Result = std::pair, GaussianMixtureFactor::sharedFactor>; -// Integrate the probability mass in the last continuous conditional using -// the unnormalized probability q(μ;m) = exp(-error(μ;m)) at the mean. -// discrete_probability = exp(-error(μ;m)) * sqrt(det(2π Σ_m)) +/** + * Compute the probability q(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m) + * from the residual error at the mean μ. + * The residual error contains no keys, and only + * depends on the discrete separator if present. 
+ */ static std::shared_ptr createDiscreteFactor( const DecisionTree &eliminationResults, const DiscreteKeys &discreteSeparator) { - auto probability = [&](const Result &pair) -> double { + auto logProbability = [&](const Result &pair) -> double { const auto &[conditional, factor] = pair; static const VectorValues kEmpty; // If the factor is not null, it has no keys, just contains the residual. if (!factor) return 1.0; // TODO(dellaert): not loving this. - return exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); + + // Logspace version of: + // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); + // We take negative of the logNormalizationConstant `log(1/k)` + // to get `log(k)`. + return -factor->error(kEmpty) + (-conditional->logNormalizationConstant()); }; - DecisionTree probabilities(eliminationResults, probability); + AlgebraicDecisionTree logProbabilities( + DecisionTree(eliminationResults, logProbability)); + + // Perform normalization + double max_log = logProbabilities.max(); + AlgebraicDecisionTree probabilities = DecisionTree( + logProbabilities, + [&max_log](const double x) { return exp(x - max_log); }); + probabilities = probabilities.normalize(probabilities.sum()); return std::make_shared(discreteSeparator, probabilities); } From ea104c4b8399f27aa48fc95f0888d304c8ff39d2 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:29:40 -0400 Subject: [PATCH 081/398] support for varying normalizers in GaussianMixtureFactor --- gtsam/hybrid/GaussianMixtureFactor.cpp | 83 +++++++++++++++++++++++++- gtsam/hybrid/GaussianMixtureFactor.h | 18 ++++-- 2 files changed, 92 insertions(+), 9 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index a3db16d04..e519cefe6 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -28,11 +28,86 @@ namespace gtsam { +/** + * @brief Helper function to correct the [A|b] matrices in 
the factor components + * with the normalizer values. + * This is done by storing the normalizer value in + * the `b` vector as an additional row. + * + * @param factors DecisionTree of GaussianFactor shared pointers. + * @param varyingNormalizers Flag indicating the normalizers are different for + * each component. + * @return GaussianMixtureFactor::Factors + */ +GaussianMixtureFactor::Factors correct( + const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) { + if (!varyingNormalizers) { + return factors; + } + + // First compute all the sqrt(|2 pi Sigma|) terms + auto computeNormalizers = [](const GaussianMixtureFactor::sharedFactor &gf) { + auto jf = std::dynamic_pointer_cast(gf); + // If we have, say, a Hessian factor, then no need to do anything + if (!jf) return 0.0; + + auto model = jf->get_model(); + // If there is no noise model, there is nothing to do. + if (!model) { + return 0.0; + } + // Since noise models are Gaussian, we can get the logDeterminant using the + // same trick as in GaussianConditional + double logDetR = + model->R().diagonal().unaryExpr([](double x) { return log(x); }).sum(); + double logDeterminantSigma = -2.0 * logDetR; + + size_t n = model->dim(); + constexpr double log2pi = 1.8378770664093454835606594728112; + return n * log2pi + logDeterminantSigma; + }; + + AlgebraicDecisionTree log_normalizers = + DecisionTree(factors, computeNormalizers); + + // Find the minimum value so we can "proselytize" to positive values. + // Done because we can't have sqrt of negative numbers. + double min_log_normalizer = log_normalizers.min(); + log_normalizers = log_normalizers.apply( + [&min_log_normalizer](double n) { return n - min_log_normalizer; }); + + // Finally, update the [A|b] matrices. + auto update = [&log_normalizers]( + const Assignment &assignment, + const GaussianMixtureFactor::sharedFactor &gf) { + auto jf = std::dynamic_pointer_cast(gf); + if (!jf) return gf; + // If there is no noise model, there is nothing to do. 
+ if (!jf->get_model()) return gf; + // If the log_normalizer is 0, do nothing + if (log_normalizers(assignment) == 0.0) return gf; + + GaussianFactorGraph gfg; + gfg.push_back(jf); + + Vector c(1); + c << std::sqrt(log_normalizers(assignment)); + auto constantFactor = std::make_shared(c); + + gfg.push_back(constantFactor); + return std::dynamic_pointer_cast( + std::make_shared(gfg)); + }; + return factors.apply(update); +} + /* *******************************************************************************/ GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const Factors &factors) - : Base(continuousKeys, discreteKeys), factors_(factors) {} + const Factors &factors, + bool varyingNormalizers) + : Base(continuousKeys, discreteKeys), + factors_(correct(factors, varyingNormalizers)) {} /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { @@ -54,7 +129,9 @@ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { /* *******************************************************************************/ void GaussianMixtureFactor::print(const std::string &s, const KeyFormatter &formatter) const { - HybridFactor::print(s, formatter); + std::cout << (s.empty() ? "" : s + "\n"); + std::cout << "GaussianMixtureFactor" << std::endl; + HybridFactor::print("", formatter); std::cout << "{\n"; if (factors_.empty()) { std::cout << " empty" << std::endl; diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 67d12ddb0..588501bbe 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -82,10 +82,13 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * their cardinalities. * @param factors The decision tree of Gaussian factors stored as the mixture * density. 
+ * @param varyingNormalizers Flag indicating factor components have varying + * normalizer values. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const Factors &factors); + const Factors &factors, + bool varyingNormalizers = false); /** * @brief Construct a new GaussianMixtureFactor object using a vector of @@ -94,12 +97,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of gaussian factor shared pointers. + * @param varyingNormalizers Flag indicating factor components have varying + * normalizer values. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const std::vector &factors) + const std::vector &factors, + bool varyingNormalizers = false) : GaussianMixtureFactor(continuousKeys, discreteKeys, - Factors(discreteKeys, factors)) {} + Factors(discreteKeys, factors), + varyingNormalizers) {} /// @} /// @name Testable @@ -107,9 +114,8 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { bool equals(const HybridFactor &lf, double tol = 1e-9) const override; - void print( - const std::string &s = "GaussianMixtureFactor\n", - const KeyFormatter &formatter = DefaultKeyFormatter) const override; + void print(const std::string &s = "", const KeyFormatter &formatter = + DefaultKeyFormatter) const override; /// @} /// @name Standard API From 5e3093e3e2a693eda24f6a02d5a4ee6d54ef1a1c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:30:32 -0400 Subject: [PATCH 082/398] tests for verification --- .../tests/testGaussianMixtureFactor.cpp | 247 +++++++++++++++++- .../tests/testHybridNonlinearFactorGraph.cpp | 18 ++ 2 files changed, 260 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 9cc7e6bfd..47b9ddc99 
100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -22,9 +22,13 @@ #include #include #include +#include +#include #include #include #include +#include +#include // Include for test suite #include @@ -56,7 +60,6 @@ TEST(GaussianMixtureFactor, Sum) { auto b = Matrix::Zero(2, 1); Vector2 sigmas; sigmas << 1, 2; - auto model = noiseModel::Diagonal::Sigmas(sigmas, true); auto f10 = std::make_shared(X(1), A1, X(2), A2, b); auto f11 = std::make_shared(X(1), A1, X(2), A2, b); @@ -106,7 +109,8 @@ TEST(GaussianMixtureFactor, Printing) { GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); std::string expected = - R"(Hybrid [x1 x2; 1]{ + R"(GaussianMixtureFactor +Hybrid [x1 x2; 1]{ Choice(1) 0 Leaf : A[x1] = [ @@ -178,7 +182,8 @@ TEST(GaussianMixtureFactor, Error) { continuousValues.insert(X(2), Vector2(1, 1)); // error should return a tree of errors, with nodes for each discrete value. - AlgebraicDecisionTree error_tree = mixtureFactor.errorTree(continuousValues); + AlgebraicDecisionTree error_tree = + mixtureFactor.errorTree(continuousValues); std::vector discrete_keys = {m1}; // Error values for regression test @@ -191,8 +196,240 @@ TEST(GaussianMixtureFactor, Error) { DiscreteValues discreteValues; discreteValues[m1.first] = 1; EXPECT_DOUBLES_EQUAL( - 4.0, mixtureFactor.error({continuousValues, discreteValues}), - 1e-9); + 4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9); +} + +/* ************************************************************************* */ +// Test components with differing means +TEST(GaussianMixtureFactor, DifferentMeans) { + DiscreteKey m1(M(1), 2), m2(M(2), 2); + + Values values; + double x1 = 0.0, x2 = 1.75, x3 = 2.60; + values.insert(X(1), x1); + values.insert(X(2), x2); + values.insert(X(3), x3); + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0); + auto prior_noise = 
noiseModel::Isotropic::Sigma(1, 1e-0); + + auto f0 = std::make_shared>(X(1), X(2), 0.0, model0) + ->linearize(values); + auto f1 = std::make_shared>(X(1), X(2), 2.0, model1) + ->linearize(values); + std::vector factors{f0, f1}; + + GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors, true); + HybridGaussianFactorGraph hfg; + hfg.push_back(mixtureFactor); + + f0 = std::make_shared>(X(2), X(3), 0.0, model0) + ->linearize(values); + f1 = std::make_shared>(X(2), X(3), 2.0, model1) + ->linearize(values); + std::vector factors23{f0, f1}; + hfg.push_back(GaussianMixtureFactor({X(2), X(3)}, {m2}, factors23, true)); + + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + hfg.push_back(prior); + + hfg.push_back(PriorFactor(X(2), 2.0, prior_noise).linearize(values)); + + auto bn = hfg.eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{ + {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}}, + DiscreteValues{{M(1), 1}, {M(2), 0}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}, {M(2), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 0}, {M(2), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}, {M(2), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}, {M(2), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + } +} + +/* 
************************************************************************* */ +/** + * @brief Test components with differing covariances. + * The factor graph is + * *-X1-*-X2 + * | + * M1 + */ +TEST(GaussianMixtureFactor, DifferentCovariances) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 1.0, x2 = 1.0; + values.insert(X(1), x1); + values.insert(X(2), x2); + + double between = 0.0; + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + auto f0 = + std::make_shared>(X(1), X(2), between, model0); + auto f1 = + std::make_shared>(X(1), X(2), between, model1); + std::vector factors{f0, f1}; + + // Create via toFactorGraph + using symbol_shorthand::Z; + Matrix H0_1, H0_2, H1_1, H1_2; + Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); + std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H0_1 /*Sp1*/}, + {X(2), H0_2 /*Tp2*/}}; + + Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); + std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H1_1 /*Sp1*/}, + {X(2), H1_2 /*Tp2*/}}; + gtsam::GaussianMixtureFactor gmf( + {X(1), X(2)}, {m1}, + {std::make_shared(X(1), H0_1, X(2), H0_2, -d0, model0), + std::make_shared(X(1), H1_1, X(2), H1_2, -d1, model1)}, + true); + + // Create FG with single GaussianMixtureFactor + HybridGaussianFactorGraph mixture_fg; + mixture_fg.add(gmf); + + // Linearized prior factor on X1 + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + mixture_fg.push_back(prior); + + auto hbn = mixture_fg.eliminateSequential(); + // hbn->print(); + + VectorValues cv; + cv.insert(X(1), Vector1(0.0)); + cv.insert(X(2), Vector1(0.0)); + + // Check that the error values at the MLE point μ. 
+ AlgebraicDecisionTree errorTree = hbn->errorTree(cv); + + DiscreteValues dv0{{M(1), 0}}; + DiscreteValues dv1{{M(1), 1}}; + + // regression + EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); + EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); + + DiscreteConditional expected_m1(m1, "0.5/0.5"); + DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); + + EXPECT(assert_equal(expected_m1, actual_m1)); +} + +/* ************************************************************************* */ +/** + * @brief Test components with differing covariances + * but with a Bayes net P(Z|X, M) converted to a FG. + */ +TEST(GaussianMixtureFactor, DifferentCovariances2) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 1.0, x2 = 1.0; + values.insert(X(1), x1); + values.insert(X(2), x2); + + double between = 0.0; + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + auto f0 = + std::make_shared>(X(1), X(2), between, model0); + auto f1 = + std::make_shared>(X(1), X(2), between, model1); + std::vector factors{f0, f1}; + + // Create via toFactorGraph + using symbol_shorthand::Z; + Matrix H0_1, H0_2, H1_1, H1_2; + Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); + std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H0_1 /*Sp1*/}, + {X(2), H0_2 /*Tp2*/}}; + + Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); + std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H1_1 /*Sp1*/}, + {X(2), H1_2 /*Tp2*/}}; + auto gm = new gtsam::GaussianMixture( + {Z(1)}, {X(1), X(2)}, {m1}, + {std::make_shared(terms0, 1, -d0, model0), + std::make_shared(terms1, 1, -d1, model1)}); + gtsam::HybridBayesNet bn; + bn.emplace_back(gm); + + gtsam::VectorValues measurements; + measurements.insert(Z(1), gtsam::Z_1x1); + // Create FG with single GaussianMixtureFactor + HybridGaussianFactorGraph mixture_fg = 
bn.toFactorGraph(measurements); + + // Linearized prior factor on X1 + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + mixture_fg.push_back(prior); + + auto hbn = mixture_fg.eliminateSequential(); + + VectorValues cv; + cv.insert(X(1), Vector1(0.0)); + cv.insert(X(2), Vector1(0.0)); + + // Check that the error values at the MLE point μ. + AlgebraicDecisionTree errorTree = hbn->errorTree(cv); + + DiscreteValues dv0{{M(1), 0}}; + DiscreteValues dv1{{M(1), 1}}; + + // regression + EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); + EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); + + DiscreteConditional expected_m1(m1, "0.5/0.5"); + DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); + + EXPECT(assert_equal(expected_m1, actual_m1)); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 93081d309..2d851b0ff 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -510,6 +510,7 @@ factor 0: b = [ -10 ] No noise model factor 1: +GaussianMixtureFactor Hybrid [x0 x1; m0]{ Choice(m0) 0 Leaf : @@ -534,6 +535,7 @@ Hybrid [x0 x1; m0]{ } factor 2: +GaussianMixtureFactor Hybrid [x1 x2; m1]{ Choice(m1) 0 Leaf : @@ -675,33 +677,41 @@ factor 6: P( m1 | m0 ): size: 3 conditional 0: Hybrid P( x0 | x1 m0) Discrete Keys = (m0, 2), + logNormalizationConstant: 1.38862 + Choice(m0) 0 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.85087 ] + logNormalizationConstant: 1.38862 No noise model 1 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.95037 ] + logNormalizationConstant: 1.38862 No noise model conditional 1: Hybrid P( x1 | x2 m0 m1) Discrete Keys = (m0, 2), (m1, 2), + logNormalizationConstant: 1.3935 + Choice(m1) 0 Choice(m0) 0 0 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 
] d = [ -9.99901 ] + logNormalizationConstant: 1.3935 No noise model 0 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.90098 ] + logNormalizationConstant: 1.3935 No noise model 1 Choice(m0) @@ -709,16 +719,20 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10.098 ] + logNormalizationConstant: 1.3935 No noise model 1 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10 ] + logNormalizationConstant: 1.3935 No noise model conditional 2: Hybrid P( x2 | m0 m1) Discrete Keys = (m0, 2), (m1, 2), + logNormalizationConstant: 1.38857 + Choice(m1) 0 Choice(m0) 0 0 Leaf p(x2) @@ -726,6 +740,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.1489 ] mean: 1 elements x2: -1.0099 + logNormalizationConstant: 1.38857 No noise model 0 1 Leaf p(x2) @@ -733,6 +748,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.1479 ] mean: 1 elements x2: -1.0098 + logNormalizationConstant: 1.38857 No noise model 1 Choice(m0) @@ -741,6 +757,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.0504 ] mean: 1 elements x2: -1.0001 + logNormalizationConstant: 1.38857 No noise model 1 1 Leaf p(x2) @@ -748,6 +765,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.0494 ] mean: 1 elements x2: -1 + logNormalizationConstant: 1.38857 No noise model )"; From 8d54c4abe0fea13334fb2f6927639e53b250fae9 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:40:13 -0400 Subject: [PATCH 083/398] update VectorValues::== docstring --- gtsam/linear/VectorValues.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/linear/VectorValues.h b/gtsam/linear/VectorValues.h index 99ee4eb83..2fa50b7f6 100644 --- a/gtsam/linear/VectorValues.h +++ b/gtsam/linear/VectorValues.h @@ -263,7 +263,7 @@ namespace gtsam { /** equals required by Testable for unit testing */ bool equals(const VectorValues& x, double tol = 1e-9) const; - /// Check exact equality. + /// Check equality. 
friend bool operator==(const VectorValues& lhs, const VectorValues& rhs) { return lhs.equals(rhs); } From a78ffe19e81d6ae41b385535046d1bd5cbfcf938 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:45:19 -0400 Subject: [PATCH 084/398] update unit test to also check for GaussianBayesNet::logNormalizationConstant --- gtsam/linear/tests/testGaussianBayesNet.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gtsam/linear/tests/testGaussianBayesNet.cpp b/gtsam/linear/tests/testGaussianBayesNet.cpp index 966b70915..99453ee4e 100644 --- a/gtsam/linear/tests/testGaussianBayesNet.cpp +++ b/gtsam/linear/tests/testGaussianBayesNet.cpp @@ -80,6 +80,8 @@ TEST(GaussianBayesNet, Evaluate1) { smallBayesNet.at(0)->logNormalizationConstant() + smallBayesNet.at(1)->logNormalizationConstant(), 1e-9); + EXPECT_DOUBLES_EQUAL(log(constant), smallBayesNet.logNormalizationConstant(), + 1e-9); const double actual = smallBayesNet.evaluate(mean); EXPECT_DOUBLES_EQUAL(constant, actual, 1e-9); } From 910300b810cb32dcabf02e6286918f2442de6fec Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 17:43:48 -0400 Subject: [PATCH 085/398] fix test --- gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 93081d309..751e84d91 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -680,12 +680,14 @@ conditional 0: Hybrid P( x0 | x1 m0) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.85087 ] + logNormalizationConstant: 1.38862 No noise model 1 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.95037 ] + logNormalizationConstant: 1.38862 No noise model conditional 1: Hybrid P( x1 | x2 m0 m1) @@ -696,12 +698,14 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.99901 
] + logNormalizationConstant: 1.3935 No noise model 0 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.90098 ] + logNormalizationConstant: 1.3935 No noise model 1 Choice(m0) @@ -709,12 +713,14 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10.098 ] + logNormalizationConstant: 1.3935 No noise model 1 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10 ] + logNormalizationConstant: 1.3935 No noise model conditional 2: Hybrid P( x2 | m0 m1) @@ -726,6 +732,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.1489 ] mean: 1 elements x2: -1.0099 + logNormalizationConstant: 1.38857 No noise model 0 1 Leaf p(x2) @@ -733,6 +740,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.1479 ] mean: 1 elements x2: -1.0098 + logNormalizationConstant: 1.38857 No noise model 1 Choice(m0) @@ -741,6 +749,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.0504 ] mean: 1 elements x2: -1.0001 + logNormalizationConstant: 1.38857 No noise model 1 1 Leaf p(x2) @@ -748,6 +757,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.0494 ] mean: 1 elements x2: -1 + logNormalizationConstant: 1.38857 No noise model )"; From 9818f89ceca8000e24b7eb0e81bb5d05685718ca Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 17:48:04 -0400 Subject: [PATCH 086/398] remove GaussianBayesNetTree methods --- gtsam/hybrid/GaussianMixture.cpp | 29 ----------------------------- gtsam/hybrid/GaussianMixture.h | 17 +---------------- gtsam/hybrid/HybridFactor.h | 2 -- 3 files changed, 1 insertion(+), 47 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 36a34226b..1411ede8d 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -93,35 +93,6 @@ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { return {conditionals_, wrap}; } -/* -*******************************************************************************/ -GaussianBayesNetTree 
GaussianMixture::add( - const GaussianBayesNetTree &sum) const { - using Y = GaussianBayesNet; - auto add = [](const Y &graph1, const Y &graph2) { - auto result = graph1; - if (graph2.size() == 0) { - return GaussianBayesNet(); - } - result.push_back(graph2); - return result; - }; - const auto tree = asGaussianBayesNetTree(); - return sum.empty() ? tree : sum.apply(tree, add); -} - -/* *******************************************************************************/ -GaussianBayesNetTree GaussianMixture::asGaussianBayesNetTree() const { - auto wrap = [](const GaussianConditional::shared_ptr &gc) { - if (gc) { - return GaussianBayesNet{gc}; - } else { - return GaussianBayesNet(); - } - }; - return {conditionals_, wrap}; -} - /* *******************************************************************************/ size_t GaussianMixture::nrComponents() const { size_t total = 0; diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index bfa342dcf..41692bb1c 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -67,17 +67,11 @@ class GTSAM_EXPORT GaussianMixture double logConstant_; ///< log of the normalization constant. /** - * @brief Convert a DecisionTree of factors into + * @brief Convert a GaussianMixture of conditionals into * a DecisionTree of Gaussian factor graphs. */ GaussianFactorGraphTree asGaussianFactorGraphTree() const; - /** - * @brief Convert a DecisionTree of conditionals into - * a DecisionTree of Gaussian Bayes nets. - */ - GaussianBayesNetTree asGaussianBayesNetTree() const; - /** * @brief Helper function to get the pruner functor. * @@ -256,15 +250,6 @@ class GTSAM_EXPORT GaussianMixture * @return GaussianFactorGraphTree */ GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const; - - /** - * @brief Merge the Gaussian Bayes Nets in `this` and `sum` while - * maintaining the decision tree structure. 
- * - * @param sum Decision Tree of Gaussian Bayes Nets - * @return GaussianBayesNetTree - */ - GaussianBayesNetTree add(const GaussianBayesNetTree &sum) const; /// @} private: diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index 418489d66..a9c0e53d2 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -34,8 +34,6 @@ class HybridValues; /// Alias for DecisionTree of GaussianFactorGraphs using GaussianFactorGraphTree = DecisionTree; -/// Alias for DecisionTree of GaussianBayesNets -using GaussianBayesNetTree = DecisionTree; KeyVector CollectKeys(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys); From 73d971a3c6aef8985e947d824ba96ba2ebaae0a6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 04:12:21 -0400 Subject: [PATCH 087/398] unit tests for AlgebraicDecisionTree helper methods --- .../tests/testAlgebraicDecisionTree.cpp | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp b/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp index 19f4686c2..d65a9c82b 100644 --- a/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp +++ b/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp @@ -593,6 +593,55 @@ TEST(ADT, zero) { EXPECT_DOUBLES_EQUAL(0, anotb(x11), 1e-9); } +/// Example ADT from 0 to 11. 
+ADT exampleADT() { + DiscreteKey A(0, 2), B(1, 3), C(2, 2); + ADT f(A & B & C, "0 6 2 8 4 10 1 7 3 9 5 11"); + return f; +} +/* ************************************************************************** */ +// Test sum +TEST(ADT, Sum) { + ADT a = exampleADT(); + double expected_sum = 0; + for (double i = 0; i < 12; i++) { + expected_sum += i; + } + EXPECT_DOUBLES_EQUAL(expected_sum, a.sum(), 1e-9); +} + +/* ************************************************************************** */ +// Test normalize +TEST(ADT, Normalize) { + ADT a = exampleADT(); + double sum = a.sum(); + auto actual = a.normalize(sum); + + DiscreteKey A(0, 2), B(1, 3), C(2, 2); + DiscreteKeys keys = DiscreteKeys{A, B, C}; + std::vector cpt{0 / sum, 6 / sum, 2 / sum, 8 / sum, + 4 / sum, 10 / sum, 1 / sum, 7 / sum, + 3 / sum, 9 / sum, 5 / sum, 11 / sum}; + ADT expected(keys, cpt); + EXPECT(assert_equal(expected, actual)); +} + +/* ************************************************************************** */ +// Test min +TEST(ADT, Min) { + ADT a = exampleADT(); + double min = a.min(); + EXPECT_DOUBLES_EQUAL(0.0, min, 1e-9); +} + +/* ************************************************************************** */ +// Test max +TEST(ADT, Max) { + ADT a = exampleADT(); + double max = a.max(); + EXPECT_DOUBLES_EQUAL(11.0, max, 1e-9); +} + /* ************************************************************************* */ int main() { TestResult tr; From 8abd2756ea895dbd0da4ab5246730533f27075ff Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 04:19:44 -0400 Subject: [PATCH 088/398] throw error if invalid assignment --- gtsam/hybrid/GaussianMixture.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 1411ede8d..0a0332af8 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -346,9 +346,10 @@ double GaussianMixture::error(const HybridValues &values) const { } } - 
// The discrete assignment is not valid so we return 0.0 erorr. + // The discrete assignment is not valid so we throw an error. if (!valid_assignment) { - return 0.0; + throw std::runtime_error( + "Invalid discrete values in values. Not all discrete keys specified."); } // Directly index to get the conditional, no need to build the whole tree. From 450fb0a016942b5ac3bc9f0639a2c951cddd61d9 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 06:12:51 -0400 Subject: [PATCH 089/398] improve tests --- .../tests/testGaussianMixtureFactor.cpp | 328 ++++++++---------- 1 file changed, 143 insertions(+), 185 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 47b9ddc99..dfafb923b 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -38,6 +38,7 @@ using namespace gtsam; using noiseModel::Isotropic; using symbol_shorthand::M; using symbol_shorthand::X; +using symbol_shorthand::Z; /* ************************************************************************* */ // Check iterators of empty mixture. @@ -199,90 +200,161 @@ TEST(GaussianMixtureFactor, Error) { 4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9); } +/** + * @brief Helper function to specify a Hybrid Bayes Net + * {P(X1) P(Z1 | X1, X2, M1)} and convert it to a Hybrid Factor Graph + * {P(X1)L(X1, X2, M1; Z1)} by converting to likelihoods given Z1. + * + * We can specify either different means or different sigmas, + * or both for each hybrid factor component. + * + * @param values Initial values for linearization. + * @param means The mean values for the conditional components. + * @param sigmas Noise model sigma values (standard deviation). + * @param m1 The discrete mode key. + * @param z1 The measurement value. 
+ * @return HybridGaussianFactorGraph + */ +HybridGaussianFactorGraph GetFactorGraphFromBayesNet( + const gtsam::Values &values, const std::vector &means, + const std::vector &sigmas, DiscreteKey &m1, double z1 = 0.0) { + // Noise models + auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); + auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + // GaussianMixtureFactor component factors + auto f0 = + std::make_shared>(X(1), X(2), means[0], model0); + auto f1 = + std::make_shared>(X(1), X(2), means[1], model1); + std::vector factors{f0, f1}; + + /// Get terms for each p^m(z1 | x1, x2) + Matrix H0_1, H0_2, H1_1, H1_2; + double x1 = values.at(X(1)), x2 = values.at(X(2)); + Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); + std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H0_1 /*Sp1*/}, + {X(2), H0_2 /*Tp2*/}}; + + Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); + std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H1_1 /*Sp1*/}, + {X(2), H1_2 /*Tp2*/}}; + // Create conditional P(Z1 | X1, X2, M1) + auto gm = new gtsam::GaussianMixture( + {Z(1)}, {X(1), X(2)}, {m1}, + {std::make_shared(terms0, 1, -d0, model0), + std::make_shared(terms1, 1, -d1, model1)}); + gtsam::HybridBayesNet bn; + bn.emplace_back(gm); + // bn.print(); + + // Create FG via toFactorGraph + gtsam::VectorValues measurements; + measurements.insert(Z(1), gtsam::I_1x1 * z1); // Set Z1 = 0 + HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); + + // Linearized prior factor on X1 + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + mixture_fg.push_back(prior); + + return mixture_fg; +} + /* ************************************************************************* */ -// Test components with differing means -TEST(GaussianMixtureFactor, DifferentMeans) { - DiscreteKey m1(M(1), 2), m2(M(2), 2); +/** + * @brief Test components with differing means. 
+ * + * We specify a hybrid Bayes network P(Z | X, M) =p(X1)p(Z1 | X1, X2, M1), + * which is then converted to a factor graph by specifying Z1. + * + * p(Z1 | X1, X2, M1) has 2 factors each for the binary mode m1, with only the + * means being different. + */ +TEST(GaussianMixtureFactor, DifferentMeansHBN) { + DiscreteKey m1(M(1), 2); Values values; - double x1 = 0.0, x2 = 1.75, x3 = 2.60; + double x1 = 0.0, x2 = 1.75; values.insert(X(1), x1); values.insert(X(2), x2); - values.insert(X(3), x3); - auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0); + // Different means, same sigma + std::vector means{0.0, 2.0}, sigmas{1e-0, 1e-0}; - auto f0 = std::make_shared>(X(1), X(2), 0.0, model0) - ->linearize(values); - auto f1 = std::make_shared>(X(1), X(2), 2.0, model1) - ->linearize(values); - std::vector factors{f0, f1}; - - GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors, true); - HybridGaussianFactorGraph hfg; - hfg.push_back(mixtureFactor); - - f0 = std::make_shared>(X(2), X(3), 0.0, model0) - ->linearize(values); - f1 = std::make_shared>(X(2), X(3), 2.0, model1) - ->linearize(values); - std::vector factors23{f0, f1}; - hfg.push_back(GaussianMixtureFactor({X(2), X(3)}, {m2}, factors23, true)); - - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - hfg.push_back(prior); - - hfg.push_back(PriorFactor(X(2), 2.0, prior_noise).linearize(values)); - - auto bn = hfg.eliminateSequential(); - HybridValues actual = bn->optimize(); - - HybridValues expected( - VectorValues{ - {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}}, - DiscreteValues{{M(1), 1}, {M(2), 0}}); - - EXPECT(assert_equal(expected, actual)); + HybridGaussianFactorGraph hfg = + GetFactorGraphFromBayesNet(values, means, sigmas, m1, 0.0); { - DiscreteValues dv{{M(1), 0}, {M(2), 0}}; - VectorValues cont = bn->optimize(dv); - double error = 
bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + // With no measurement on X2, each mode should be equally likely + auto bn = hfg.eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{{X(1), Vector1(0.0)}, {X(2), Vector1(-1.75)}}, + DiscreteValues{{M(1), 0}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); + } } { - DiscreteValues dv{{M(1), 0}, {M(2), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}, {M(2), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}, {M(2), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + // If we add a measurement on X2, we have more information to work with. 
+ // Add a measurement on X2 + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + GaussianConditional meas_z2(Z(2), Vector1(2.0), I_1x1, X(2), I_1x1, + prior_noise); + auto prior_x2 = meas_z2.likelihood(Vector1(x2)); + + hfg.push_back(prior_x2); + + auto bn = hfg.eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{{X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}}, + DiscreteValues{{M(1), 1}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9); + } } } /* ************************************************************************* */ /** - * @brief Test components with differing covariances. - * The factor graph is - * *-X1-*-X2 - * | - * M1 + * @brief Test components with differing covariances + * but with a Bayes net P(Z|X, M) converted to a FG. 
*/ TEST(GaussianMixtureFactor, DifferentCovariances) { DiscreteKey m1(M(1), 2); @@ -292,123 +364,9 @@ TEST(GaussianMixtureFactor, DifferentCovariances) { values.insert(X(1), x1); values.insert(X(2), x2); - double between = 0.0; - - auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - - auto f0 = - std::make_shared>(X(1), X(2), between, model0); - auto f1 = - std::make_shared>(X(1), X(2), between, model1); - std::vector factors{f0, f1}; - - // Create via toFactorGraph - using symbol_shorthand::Z; - Matrix H0_1, H0_2, H1_1, H1_2; - Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); - std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H0_1 /*Sp1*/}, - {X(2), H0_2 /*Tp2*/}}; - - Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); - std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H1_1 /*Sp1*/}, - {X(2), H1_2 /*Tp2*/}}; - gtsam::GaussianMixtureFactor gmf( - {X(1), X(2)}, {m1}, - {std::make_shared(X(1), H0_1, X(2), H0_2, -d0, model0), - std::make_shared(X(1), H1_1, X(2), H1_2, -d1, model1)}, - true); - - // Create FG with single GaussianMixtureFactor - HybridGaussianFactorGraph mixture_fg; - mixture_fg.add(gmf); - - // Linearized prior factor on X1 - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - mixture_fg.push_back(prior); - - auto hbn = mixture_fg.eliminateSequential(); - // hbn->print(); - - VectorValues cv; - cv.insert(X(1), Vector1(0.0)); - cv.insert(X(2), Vector1(0.0)); - - // Check that the error values at the MLE point μ. 
- AlgebraicDecisionTree errorTree = hbn->errorTree(cv); - - DiscreteValues dv0{{M(1), 0}}; - DiscreteValues dv1{{M(1), 1}}; - - // regression - EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); - EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); - - DiscreteConditional expected_m1(m1, "0.5/0.5"); - DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); - - EXPECT(assert_equal(expected_m1, actual_m1)); -} - -/* ************************************************************************* */ -/** - * @brief Test components with differing covariances - * but with a Bayes net P(Z|X, M) converted to a FG. - */ -TEST(GaussianMixtureFactor, DifferentCovariances2) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 1.0, x2 = 1.0; - values.insert(X(1), x1); - values.insert(X(2), x2); - - double between = 0.0; - - auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - - auto f0 = - std::make_shared>(X(1), X(2), between, model0); - auto f1 = - std::make_shared>(X(1), X(2), between, model1); - std::vector factors{f0, f1}; - - // Create via toFactorGraph - using symbol_shorthand::Z; - Matrix H0_1, H0_2, H1_1, H1_2; - Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); - std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H0_1 /*Sp1*/}, - {X(2), H0_2 /*Tp2*/}}; - - Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); - std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H1_1 /*Sp1*/}, - {X(2), H1_2 /*Tp2*/}}; - auto gm = new gtsam::GaussianMixture( - {Z(1)}, {X(1), X(2)}, {m1}, - {std::make_shared(terms0, 1, -d0, model0), - std::make_shared(terms1, 1, -d1, model1)}); - gtsam::HybridBayesNet bn; - bn.emplace_back(gm); - - gtsam::VectorValues measurements; - measurements.insert(Z(1), gtsam::Z_1x1); - // Create FG with single GaussianMixtureFactor - HybridGaussianFactorGraph mixture_fg = 
bn.toFactorGraph(measurements); - - // Linearized prior factor on X1 - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - mixture_fg.push_back(prior); + std::vector means{0.0, 0.0}, sigmas{1e2, 1e-2}; + HybridGaussianFactorGraph mixture_fg = + GetFactorGraphFromBayesNet(values, means, sigmas, m1); auto hbn = mixture_fg.eliminateSequential(); From 191fc3da118aedc9461c6cbb22a213538b9b2a27 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 06:13:43 -0400 Subject: [PATCH 090/398] remove augmentation in GaussianMixture in favor of augmentation in GaussianMixtureFactor --- gtsam/hybrid/GaussianMixture.cpp | 18 ++---------------- gtsam/hybrid/GaussianMixtureFactor.cpp | 6 +++--- 2 files changed, 5 insertions(+), 19 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 0a0332af8..1440f7e9e 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -200,24 +200,10 @@ std::shared_ptr GaussianMixture::likelihood( const GaussianMixtureFactor::Factors likelihoods( conditionals_, [&](const GaussianConditional::shared_ptr &conditional) { const auto likelihood_m = conditional->likelihood(given); - const double Cgm_Kgcm = - logConstant_ - conditional->logNormalizationConstant(); - if (Cgm_Kgcm == 0.0) { - return likelihood_m; - } else { - // Add a constant factor to the likelihood in case the noise models - // are not all equal. 
- GaussianFactorGraph gfg; - gfg.push_back(likelihood_m); - Vector c(1); - c << std::sqrt(2.0 * Cgm_Kgcm); - auto constantFactor = std::make_shared(c); - gfg.push_back(constantFactor); - return std::make_shared(gfg); - } + return likelihood_m; }); return std::make_shared( - continuousParentKeys, discreteParentKeys, likelihoods); + continuousParentKeys, discreteParentKeys, likelihoods, true); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index e519cefe6..7fb16f0d1 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -29,7 +29,7 @@ namespace gtsam { /** - * @brief Helper function to correct the [A|b] matrices in the factor components + * @brief Helper function to augment the [A|b] matrices in the factor components * with the normalizer values. * This is done by storing the normalizer value in * the `b` vector as an additional row. @@ -39,7 +39,7 @@ namespace gtsam { * each component. 
* @return GaussianMixtureFactor::Factors */ -GaussianMixtureFactor::Factors correct( +GaussianMixtureFactor::Factors augment( const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) { if (!varyingNormalizers) { return factors; @@ -107,7 +107,7 @@ GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, const Factors &factors, bool varyingNormalizers) : Base(continuousKeys, discreteKeys), - factors_(correct(factors, varyingNormalizers)) {} + factors_(augment(factors, varyingNormalizers)) {} /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { From 41d6113aea27b8fd0bf16531d477c4f2356f0c79 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:05:31 -0400 Subject: [PATCH 091/398] Kill == operator in VectorValues --- gtsam/linear/VectorValues.h | 5 ----- 1 file changed, 5 deletions(-) diff --git a/gtsam/linear/VectorValues.h b/gtsam/linear/VectorValues.h index 99ee4eb83..7fbd43ffc 100644 --- a/gtsam/linear/VectorValues.h +++ b/gtsam/linear/VectorValues.h @@ -263,11 +263,6 @@ namespace gtsam { /** equals required by Testable for unit testing */ bool equals(const VectorValues& x, double tol = 1e-9) const; - /// Check exact equality. 
- friend bool operator==(const VectorValues& lhs, const VectorValues& rhs) { - return lhs.equals(rhs); - } - /// @{ /// @name Advanced Interface /// @{ From 75d47246685807d1c5253645620ef1dc1e0eadd4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:05:47 -0400 Subject: [PATCH 092/398] remove extra imports in DiscreteBayesNet.cpp --- gtsam/discrete/DiscreteBayesNet.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/gtsam/discrete/DiscreteBayesNet.cpp b/gtsam/discrete/DiscreteBayesNet.cpp index c1aa18828..1c5c81e45 100644 --- a/gtsam/discrete/DiscreteBayesNet.cpp +++ b/gtsam/discrete/DiscreteBayesNet.cpp @@ -18,8 +18,6 @@ #include #include -#include -#include #include namespace gtsam { From dce56417bd68ccf7da7db614dbee535bc937cbef Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:07:03 -0400 Subject: [PATCH 093/398] minor edits --- gtsam/hybrid/tests/testGaussianMixtureFactor.cpp | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index dfafb923b..ede3c342d 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -209,14 +209,14 @@ TEST(GaussianMixtureFactor, Error) { * or both for each hybrid factor component. * * @param values Initial values for linearization. - * @param means The mean values for the conditional components. + * @param mus The mean values for the conditional components. * @param sigmas Noise model sigma values (standard deviation). * @param m1 The discrete mode key. * @param z1 The measurement value. 
* @return HybridGaussianFactorGraph */ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( - const gtsam::Values &values, const std::vector &means, + const gtsam::Values &values, const std::vector &mus, const std::vector &sigmas, DiscreteKey &m1, double z1 = 0.0) { // Noise models auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); @@ -224,11 +224,9 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); // GaussianMixtureFactor component factors - auto f0 = - std::make_shared>(X(1), X(2), means[0], model0); - auto f1 = - std::make_shared>(X(1), X(2), means[1], model1); - std::vector factors{f0, f1}; + auto f0 = std::make_shared>(X(1), X(2), mus[0], model0); + auto f1 = std::make_shared>(X(1), X(2), mus[1], model1); + // std::vector factors{f0, f1}; /// Get terms for each p^m(z1 | x1, x2) Matrix H0_1, H0_2, H1_1, H1_2; @@ -275,7 +273,7 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( * p(Z1 | X1, X2, M1) has 2 factors each for the binary mode m1, with only the * means being different. 
*/ -TEST(GaussianMixtureFactor, DifferentMeansHBN) { +TEST(GaussianMixtureFactor, DifferentMeans) { DiscreteKey m1(M(1), 2); Values values; From 9e77eba9167b0226a09c17f59b1ecdcdd00c38d5 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:09:40 -0400 Subject: [PATCH 094/398] rename X1 to X0 and X2 to X1 --- .../tests/testGaussianMixtureFactor.cpp | 34 +++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index ede3c342d..7926cdc5a 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -224,27 +224,27 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); // GaussianMixtureFactor component factors - auto f0 = std::make_shared>(X(1), X(2), mus[0], model0); - auto f1 = std::make_shared>(X(1), X(2), mus[1], model1); + auto f0 = std::make_shared>(X(0), X(1), mus[0], model0); + auto f1 = std::make_shared>(X(0), X(1), mus[1], model1); // std::vector factors{f0, f1}; /// Get terms for each p^m(z1 | x1, x2) Matrix H0_1, H0_2, H1_1, H1_2; - double x1 = values.at(X(1)), x2 = values.at(X(2)); + double x1 = values.at(X(0)), x2 = values.at(X(1)); Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, // - {X(1), H0_1 /*Sp1*/}, - {X(2), H0_2 /*Tp2*/}}; + {X(0), H0_1 /*Sp1*/}, + {X(1), H0_2 /*Tp2*/}}; Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, // - {X(1), H1_1 /*Sp1*/}, - {X(2), H1_2 /*Tp2*/}}; + {X(0), H1_1 /*Sp1*/}, + {X(1), H1_2 /*Tp2*/}}; // Create conditional P(Z1 | X1, X2, M1) auto gm = new gtsam::GaussianMixture( - {Z(1)}, {X(1), X(2)}, {m1}, + {Z(1)}, {X(0), X(1)}, {m1}, {std::make_shared(terms0, 1, -d0, model0), std::make_shared(terms1, 1, -d1, model1)}); gtsam::HybridBayesNet bn; @@ -257,7 
+257,7 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); // Linearized prior factor on X1 - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + auto prior = PriorFactor(X(0), x1, prior_noise).linearize(values); mixture_fg.push_back(prior); return mixture_fg; @@ -278,8 +278,8 @@ TEST(GaussianMixtureFactor, DifferentMeans) { Values values; double x1 = 0.0, x2 = 1.75; - values.insert(X(1), x1); - values.insert(X(2), x2); + values.insert(X(0), x1); + values.insert(X(1), x2); // Different means, same sigma std::vector means{0.0, 2.0}, sigmas{1e-0, 1e-0}; @@ -293,7 +293,7 @@ TEST(GaussianMixtureFactor, DifferentMeans) { HybridValues actual = bn->optimize(); HybridValues expected( - VectorValues{{X(1), Vector1(0.0)}, {X(2), Vector1(-1.75)}}, + VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(-1.75)}}, DiscreteValues{{M(1), 0}}); EXPECT(assert_equal(expected, actual)); @@ -317,7 +317,7 @@ TEST(GaussianMixtureFactor, DifferentMeans) { // If we add a measurement on X2, we have more information to work with. 
// Add a measurement on X2 auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - GaussianConditional meas_z2(Z(2), Vector1(2.0), I_1x1, X(2), I_1x1, + GaussianConditional meas_z2(Z(2), Vector1(2.0), I_1x1, X(1), I_1x1, prior_noise); auto prior_x2 = meas_z2.likelihood(Vector1(x2)); @@ -327,7 +327,7 @@ TEST(GaussianMixtureFactor, DifferentMeans) { HybridValues actual = bn->optimize(); HybridValues expected( - VectorValues{{X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}}, + VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(0.25)}}, DiscreteValues{{M(1), 1}}); EXPECT(assert_equal(expected, actual)); @@ -359,8 +359,8 @@ TEST(GaussianMixtureFactor, DifferentCovariances) { Values values; double x1 = 1.0, x2 = 1.0; - values.insert(X(1), x1); - values.insert(X(2), x2); + values.insert(X(0), x1); + values.insert(X(1), x2); std::vector means{0.0, 0.0}, sigmas{1e2, 1e-2}; HybridGaussianFactorGraph mixture_fg = @@ -369,8 +369,8 @@ TEST(GaussianMixtureFactor, DifferentCovariances) { auto hbn = mixture_fg.eliminateSequential(); VectorValues cv; + cv.insert(X(0), Vector1(0.0)); cv.insert(X(1), Vector1(0.0)); - cv.insert(X(2), Vector1(0.0)); // Check that the error values at the MLE point μ. AlgebraicDecisionTree errorTree = hbn->errorTree(cv); From 3fc1019220596428dd18befe525286e16d70b9cf Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:10:21 -0400 Subject: [PATCH 095/398] provide logNormalizers directly to the augment method --- gtsam/hybrid/GaussianMixtureFactor.cpp | 66 ++++++++++---------------- gtsam/hybrid/GaussianMixtureFactor.h | 20 ++++---- 2 files changed, 37 insertions(+), 49 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 7fb16f0d1..0427eef7b 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -35,45 +35,17 @@ namespace gtsam { * the `b` vector as an additional row. * * @param factors DecisionTree of GaussianFactor shared pointers. 
- * @param varyingNormalizers Flag indicating the normalizers are different for - * each component. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. * @return GaussianMixtureFactor::Factors */ GaussianMixtureFactor::Factors augment( - const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) { - if (!varyingNormalizers) { - return factors; - } - - // First compute all the sqrt(|2 pi Sigma|) terms - auto computeNormalizers = [](const GaussianMixtureFactor::sharedFactor &gf) { - auto jf = std::dynamic_pointer_cast(gf); - // If we have, say, a Hessian factor, then no need to do anything - if (!jf) return 0.0; - - auto model = jf->get_model(); - // If there is no noise model, there is nothing to do. - if (!model) { - return 0.0; - } - // Since noise models are Gaussian, we can get the logDeterminant using the - // same trick as in GaussianConditional - double logDetR = - model->R().diagonal().unaryExpr([](double x) { return log(x); }).sum(); - double logDeterminantSigma = -2.0 * logDetR; - - size_t n = model->dim(); - constexpr double log2pi = 1.8378770664093454835606594728112; - return n * log2pi + logDeterminantSigma; - }; - - AlgebraicDecisionTree log_normalizers = - DecisionTree(factors, computeNormalizers); - + const GaussianMixtureFactor::Factors &factors, + const AlgebraicDecisionTree &logNormalizers) { // Find the minimum value so we can "proselytize" to positive values. // Done because we can't have sqrt of negative numbers. - double min_log_normalizer = log_normalizers.min(); - log_normalizers = log_normalizers.apply( + double min_log_normalizer = logNormalizers.min(); + AlgebraicDecisionTree log_normalizers = logNormalizers.apply( [&min_log_normalizer](double n) { return n - min_log_normalizer; }); // Finally, update the [A|b] matrices. 
@@ -82,8 +54,6 @@ GaussianMixtureFactor::Factors augment( const GaussianMixtureFactor::sharedFactor &gf) { auto jf = std::dynamic_pointer_cast(gf); if (!jf) return gf; - // If there is no noise model, there is nothing to do. - if (!jf->get_model()) return gf; // If the log_normalizer is 0, do nothing if (log_normalizers(assignment) == 0.0) return gf; @@ -102,12 +72,11 @@ GaussianMixtureFactor::Factors augment( } /* *******************************************************************************/ -GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const Factors &factors, - bool varyingNormalizers) +GaussianMixtureFactor::GaussianMixtureFactor( + const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, + const Factors &factors, const AlgebraicDecisionTree &logNormalizers) : Base(continuousKeys, discreteKeys), - factors_(augment(factors, varyingNormalizers)) {} + factors_(augment(factors, logNormalizers)) {} /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { @@ -194,6 +163,21 @@ double GaussianMixtureFactor::error(const HybridValues &values) const { const sharedFactor gf = factors_(values.discrete()); return gf->error(values.continuous()); } + /* *******************************************************************************/ +double ComputeLogNormalizer( + const noiseModel::Gaussian::shared_ptr &noise_model) { + // Since noise models are Gaussian, we can get the logDeterminant using + // the same trick as in GaussianConditional + double logDetR = noise_model->R() + .diagonal() + .unaryExpr([](double x) { return log(x); }) + .sum(); + double logDeterminantSigma = -2.0 * logDetR; + + size_t n = noise_model->dim(); + constexpr double log2pi = 1.8378770664093454835606594728112; + return n * log2pi + logDeterminantSigma; +} } // namespace gtsam diff --git 
a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 588501bbe..6e9f6034e 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -82,13 +82,14 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * their cardinalities. * @param factors The decision tree of Gaussian factors stored as the mixture * density. - * @param varyingNormalizers Flag indicating factor components have varying - * normalizer values. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, const Factors &factors, - bool varyingNormalizers = false); + const AlgebraicDecisionTree &logNormalizers = + AlgebraicDecisionTree(0.0)); /** * @brief Construct a new GaussianMixtureFactor object using a vector of @@ -97,16 +98,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of gaussian factor shared pointers. - * @param varyingNormalizers Flag indicating factor components have varying - * normalizer values. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. 
*/ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, const std::vector &factors, - bool varyingNormalizers = false) + const AlgebraicDecisionTree &logNormalizers = + AlgebraicDecisionTree(0.0)) : GaussianMixtureFactor(continuousKeys, discreteKeys, - Factors(discreteKeys, factors), - varyingNormalizers) {} + Factors(discreteKeys, factors), logNormalizers) {} /// @} /// @name Testable @@ -178,4 +179,7 @@ template <> struct traits : public Testable { }; +double ComputeLogNormalizer( + const noiseModel::Gaussian::shared_ptr &noise_model); + } // namespace gtsam From cfef6d3d278ac29d006faa285baa3892623c3e90 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:10:40 -0400 Subject: [PATCH 096/398] update GaussianMixture::likelihood to compute the logNormalizers --- gtsam/hybrid/GaussianMixture.cpp | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 1440f7e9e..6c92f0252 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -202,8 +202,25 @@ std::shared_ptr GaussianMixture::likelihood( const auto likelihood_m = conditional->likelihood(given); return likelihood_m; }); + + // First compute all the sqrt(|2 pi Sigma|) terms + auto computeLogNormalizers = [](const GaussianFactor::shared_ptr &gf) { + auto jf = std::dynamic_pointer_cast(gf); + // If we have, say, a Hessian factor, then no need to do anything + if (!jf) return 0.0; + + auto model = jf->get_model(); + // If there is no noise model, there is nothing to do. 
+ if (!model) { + return 0.0; + } + return ComputeLogNormalizer(model); + }; + + AlgebraicDecisionTree log_normalizers = + DecisionTree(likelihoods, computeLogNormalizers); return std::make_shared( - continuousParentKeys, discreteParentKeys, likelihoods, true); + continuousParentKeys, discreteParentKeys, likelihoods, log_normalizers); } /* ************************************************************************* */ From 30bf261c15d8190c9eea09057bf11269d38628c7 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:11:00 -0400 Subject: [PATCH 097/398] Tests which verify direct factor specification works well --- .../tests/testGaussianMixtureFactor.cpp | 145 ++++++++++++++++++ 1 file changed, 145 insertions(+) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 7926cdc5a..10541cf88 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -388,6 +388,151 @@ TEST(GaussianMixtureFactor, DifferentCovariances) { EXPECT(assert_equal(expected_m1, actual_m1)); } +HybridGaussianFactorGraph CreateFactorGraph(const gtsam::Values &values, + const std::vector &mus, + const std::vector &sigmas, + DiscreteKey &m1) { + auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); + auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + auto f0 = std::make_shared>(X(0), X(1), mus[0], model0) + ->linearize(values); + auto f1 = std::make_shared>(X(0), X(1), mus[1], model1) + ->linearize(values); + + // Create GaussianMixtureFactor + std::vector factors{f0, f1}; + AlgebraicDecisionTree logNormalizers( + {m1}, std::vector{ComputeLogNormalizer(model0), + ComputeLogNormalizer(model1)}); + GaussianMixtureFactor mixtureFactor({X(0), X(1)}, {m1}, factors, + logNormalizers); + + HybridGaussianFactorGraph hfg; + hfg.push_back(mixtureFactor); + + hfg.push_back(PriorFactor(X(0), 
values.at(X(0)), prior_noise) + .linearize(values)); + + return hfg; +} + +TEST(GaussianMixtureFactor, DifferentMeansFG) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 0.0, x2 = 1.75; + values.insert(X(0), x1); + values.insert(X(1), x2); + + std::vector mus = {0.0, 2.0}, sigmas = {1e-0, 1e-0}; + + HybridGaussianFactorGraph hfg = CreateFactorGraph(values, mus, sigmas, m1); + + { + auto bn = hfg.eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(-1.75)}}, + DiscreteValues{{M(1), 0}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); + } + } + + { + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + hfg.push_back( + PriorFactor(X(1), mus[1], prior_noise).linearize(values)); + + auto bn = hfg.eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(0.25)}}, + DiscreteValues{{M(1), 1}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9); + } + } +} + +/* ************************************************************************* */ +/** + * @brief Test components with differing covariances. 
+ * The factor graph is + * *-X1-*-X2 + * | + * M1 + */ +TEST(GaussianMixtureFactor, DifferentCovariancesFG) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 1.0, x2 = 1.0; + values.insert(X(0), x1); + values.insert(X(1), x2); + + std::vector mus = {0.0, 0.0}, sigmas = {1e2, 1e-2}; + + // Create FG with GaussianMixtureFactor and prior on X1 + HybridGaussianFactorGraph mixture_fg = + CreateFactorGraph(values, mus, sigmas, m1); + + auto hbn = mixture_fg.eliminateSequential(); + + VectorValues cv; + cv.insert(X(0), Vector1(0.0)); + cv.insert(X(1), Vector1(0.0)); + + // Check that the error values at the MLE point μ. + AlgebraicDecisionTree errorTree = hbn->errorTree(cv); + hbn->errorTree(cv).print(); + hbn2->errorTree(cv).print(); + + DiscreteValues dv0{{M(1), 0}}; + DiscreteValues dv1{{M(1), 1}}; + + // regression + EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); + EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); + + DiscreteConditional expected_m1(m1, "0.5/0.5"); + DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); + + EXPECT(assert_equal(expected_m1, actual_m1)); +} + /* ************************************************************************* */ int main() { TestResult tr; From bfaff504df6cba7d5426d1296b37d5e5f419d499 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:12:23 -0400 Subject: [PATCH 098/398] remove extra prints --- gtsam/hybrid/tests/testGaussianMixtureFactor.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 10541cf88..b08fb79b7 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -517,8 +517,6 @@ TEST(GaussianMixtureFactor, DifferentCovariancesFG) { // Check that the error values at the MLE point μ. 
AlgebraicDecisionTree errorTree = hbn->errorTree(cv); - hbn->errorTree(cv).print(); - hbn2->errorTree(cv).print(); DiscreteValues dv0{{M(1), 0}}; DiscreteValues dv1{{M(1), 1}}; From 665d75544844e7d9514c294397260e4c236977f4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 22 Aug 2024 11:57:57 -0400 Subject: [PATCH 099/398] add docstring and GTSAM_EXPORT for ComputeLogNormalizer --- gtsam/hybrid/GaussianMixtureFactor.h | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 6e9f6034e..6680abb54 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -179,7 +179,16 @@ template <> struct traits : public Testable { }; -double ComputeLogNormalizer( +/** + * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values + * for a Gaussian noise model. + * We compute this in the log-space for numerical accuracy. + * + * @param noise_model The Gaussian noise model + * whose normalizer we wish to compute. + * @return double + */ +GTSAM_EXPORT double ComputeLogNormalizer( const noiseModel::Gaussian::shared_ptr &noise_model); } // namespace gtsam From 79c7c6a8b6f4bb2945ea6ac621fc3c3a1f66aaa6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:10:21 -0400 Subject: [PATCH 100/398] provide logNormalizers directly to the augment method --- gtsam/hybrid/GaussianMixtureFactor.cpp | 66 ++++++++++---------------- gtsam/hybrid/GaussianMixtureFactor.h | 20 ++++---- 2 files changed, 37 insertions(+), 49 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 7fb16f0d1..0427eef7b 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -35,45 +35,17 @@ namespace gtsam { * the `b` vector as an additional row. * * @param factors DecisionTree of GaussianFactor shared pointers. 
- * @param varyingNormalizers Flag indicating the normalizers are different for - * each component. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. * @return GaussianMixtureFactor::Factors */ GaussianMixtureFactor::Factors augment( - const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) { - if (!varyingNormalizers) { - return factors; - } - - // First compute all the sqrt(|2 pi Sigma|) terms - auto computeNormalizers = [](const GaussianMixtureFactor::sharedFactor &gf) { - auto jf = std::dynamic_pointer_cast(gf); - // If we have, say, a Hessian factor, then no need to do anything - if (!jf) return 0.0; - - auto model = jf->get_model(); - // If there is no noise model, there is nothing to do. - if (!model) { - return 0.0; - } - // Since noise models are Gaussian, we can get the logDeterminant using the - // same trick as in GaussianConditional - double logDetR = - model->R().diagonal().unaryExpr([](double x) { return log(x); }).sum(); - double logDeterminantSigma = -2.0 * logDetR; - - size_t n = model->dim(); - constexpr double log2pi = 1.8378770664093454835606594728112; - return n * log2pi + logDeterminantSigma; - }; - - AlgebraicDecisionTree log_normalizers = - DecisionTree(factors, computeNormalizers); - + const GaussianMixtureFactor::Factors &factors, + const AlgebraicDecisionTree &logNormalizers) { // Find the minimum value so we can "proselytize" to positive values. // Done because we can't have sqrt of negative numbers. - double min_log_normalizer = log_normalizers.min(); - log_normalizers = log_normalizers.apply( + double min_log_normalizer = logNormalizers.min(); + AlgebraicDecisionTree log_normalizers = logNormalizers.apply( [&min_log_normalizer](double n) { return n - min_log_normalizer; }); // Finally, update the [A|b] matrices. 
@@ -82,8 +54,6 @@ GaussianMixtureFactor::Factors augment( const GaussianMixtureFactor::sharedFactor &gf) { auto jf = std::dynamic_pointer_cast(gf); if (!jf) return gf; - // If there is no noise model, there is nothing to do. - if (!jf->get_model()) return gf; // If the log_normalizer is 0, do nothing if (log_normalizers(assignment) == 0.0) return gf; @@ -102,12 +72,11 @@ GaussianMixtureFactor::Factors augment( } /* *******************************************************************************/ -GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const Factors &factors, - bool varyingNormalizers) +GaussianMixtureFactor::GaussianMixtureFactor( + const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, + const Factors &factors, const AlgebraicDecisionTree &logNormalizers) : Base(continuousKeys, discreteKeys), - factors_(augment(factors, varyingNormalizers)) {} + factors_(augment(factors, logNormalizers)) {} /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { @@ -194,6 +163,21 @@ double GaussianMixtureFactor::error(const HybridValues &values) const { const sharedFactor gf = factors_(values.discrete()); return gf->error(values.continuous()); } + /* *******************************************************************************/ +double ComputeLogNormalizer( + const noiseModel::Gaussian::shared_ptr &noise_model) { + // Since noise models are Gaussian, we can get the logDeterminant using + // the same trick as in GaussianConditional + double logDetR = noise_model->R() + .diagonal() + .unaryExpr([](double x) { return log(x); }) + .sum(); + double logDeterminantSigma = -2.0 * logDetR; + + size_t n = noise_model->dim(); + constexpr double log2pi = 1.8378770664093454835606594728112; + return n * log2pi + logDeterminantSigma; +} } // namespace gtsam diff --git 
a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 588501bbe..6e9f6034e 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -82,13 +82,14 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * their cardinalities. * @param factors The decision tree of Gaussian factors stored as the mixture * density. - * @param varyingNormalizers Flag indicating factor components have varying - * normalizer values. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, const Factors &factors, - bool varyingNormalizers = false); + const AlgebraicDecisionTree &logNormalizers = + AlgebraicDecisionTree(0.0)); /** * @brief Construct a new GaussianMixtureFactor object using a vector of @@ -97,16 +98,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of gaussian factor shared pointers. - * @param varyingNormalizers Flag indicating factor components have varying - * normalizer values. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. 
*/ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, const std::vector &factors, - bool varyingNormalizers = false) + const AlgebraicDecisionTree &logNormalizers = + AlgebraicDecisionTree(0.0)) : GaussianMixtureFactor(continuousKeys, discreteKeys, - Factors(discreteKeys, factors), - varyingNormalizers) {} + Factors(discreteKeys, factors), logNormalizers) {} /// @} /// @name Testable @@ -178,4 +179,7 @@ template <> struct traits : public Testable { }; +double ComputeLogNormalizer( + const noiseModel::Gaussian::shared_ptr &noise_model); + } // namespace gtsam From 9a6d2cf32384d437960def61cd3d547c5f73f41e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:10:40 -0400 Subject: [PATCH 101/398] update GaussianMixture::likelihood to compute the logNormalizers --- gtsam/hybrid/GaussianMixture.cpp | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 1440f7e9e..6c92f0252 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -202,8 +202,25 @@ std::shared_ptr GaussianMixture::likelihood( const auto likelihood_m = conditional->likelihood(given); return likelihood_m; }); + + // First compute all the sqrt(|2 pi Sigma|) terms + auto computeLogNormalizers = [](const GaussianFactor::shared_ptr &gf) { + auto jf = std::dynamic_pointer_cast(gf); + // If we have, say, a Hessian factor, then no need to do anything + if (!jf) return 0.0; + + auto model = jf->get_model(); + // If there is no noise model, there is nothing to do. 
+ if (!model) { + return 0.0; + } + return ComputeLogNormalizer(model); + }; + + AlgebraicDecisionTree log_normalizers = + DecisionTree(likelihoods, computeLogNormalizers); return std::make_shared( - continuousParentKeys, discreteParentKeys, likelihoods, true); + continuousParentKeys, discreteParentKeys, likelihoods, log_normalizers); } /* ************************************************************************* */ From 98a2668fb1e78684ec9fa290e032235162cf2fe9 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 22 Aug 2024 11:57:57 -0400 Subject: [PATCH 102/398] add docstring and GTSAM_EXPORT for ComputeLogNormalizer --- gtsam/hybrid/GaussianMixtureFactor.h | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 6e9f6034e..6680abb54 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -179,7 +179,16 @@ template <> struct traits : public Testable { }; -double ComputeLogNormalizer( +/** + * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values + * for a Gaussian noise model. + * We compute this in the log-space for numerical accuracy. + * + * @param noise_model The Gaussian noise model + * whose normalizer we wish to compute. 
+ * @return double + */ +GTSAM_EXPORT double ComputeLogNormalizer( const noiseModel::Gaussian::shared_ptr &noise_model); } // namespace gtsam From cd9ee0845765767e98c8f334e3a2cf4a4bed6a7e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 22 Aug 2024 14:43:23 -0400 Subject: [PATCH 103/398] remove augment method in favor of conversion --- gtsam/hybrid/GaussianMixture.cpp | 35 ++++++++--------- gtsam/hybrid/GaussianMixtureFactor.cpp | 52 ++------------------------ gtsam/hybrid/GaussianMixtureFactor.h | 14 ++----- 3 files changed, 23 insertions(+), 78 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 6c92f0252..0a0332af8 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -200,27 +200,24 @@ std::shared_ptr GaussianMixture::likelihood( const GaussianMixtureFactor::Factors likelihoods( conditionals_, [&](const GaussianConditional::shared_ptr &conditional) { const auto likelihood_m = conditional->likelihood(given); - return likelihood_m; + const double Cgm_Kgcm = + logConstant_ - conditional->logNormalizationConstant(); + if (Cgm_Kgcm == 0.0) { + return likelihood_m; + } else { + // Add a constant factor to the likelihood in case the noise models + // are not all equal. + GaussianFactorGraph gfg; + gfg.push_back(likelihood_m); + Vector c(1); + c << std::sqrt(2.0 * Cgm_Kgcm); + auto constantFactor = std::make_shared(c); + gfg.push_back(constantFactor); + return std::make_shared(gfg); + } }); - - // First compute all the sqrt(|2 pi Sigma|) terms - auto computeLogNormalizers = [](const GaussianFactor::shared_ptr &gf) { - auto jf = std::dynamic_pointer_cast(gf); - // If we have, say, a Hessian factor, then no need to do anything - if (!jf) return 0.0; - - auto model = jf->get_model(); - // If there is no noise model, there is nothing to do. 
- if (!model) { - return 0.0; - } - return ComputeLogNormalizer(model); - }; - - AlgebraicDecisionTree log_normalizers = - DecisionTree(likelihoods, computeLogNormalizers); return std::make_shared( - continuousParentKeys, discreteParentKeys, likelihoods, log_normalizers); + continuousParentKeys, discreteParentKeys, likelihoods); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 0427eef7b..b4ee89cb0 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -28,55 +28,11 @@ namespace gtsam { -/** - * @brief Helper function to augment the [A|b] matrices in the factor components - * with the normalizer values. - * This is done by storing the normalizer value in - * the `b` vector as an additional row. - * - * @param factors DecisionTree of GaussianFactor shared pointers. - * @param logNormalizers Tree of log-normalizers corresponding to each - * Gaussian factor in factors. - * @return GaussianMixtureFactor::Factors - */ -GaussianMixtureFactor::Factors augment( - const GaussianMixtureFactor::Factors &factors, - const AlgebraicDecisionTree &logNormalizers) { - // Find the minimum value so we can "proselytize" to positive values. - // Done because we can't have sqrt of negative numbers. - double min_log_normalizer = logNormalizers.min(); - AlgebraicDecisionTree log_normalizers = logNormalizers.apply( - [&min_log_normalizer](double n) { return n - min_log_normalizer; }); - - // Finally, update the [A|b] matrices. 
- auto update = [&log_normalizers]( - const Assignment &assignment, - const GaussianMixtureFactor::sharedFactor &gf) { - auto jf = std::dynamic_pointer_cast(gf); - if (!jf) return gf; - // If the log_normalizer is 0, do nothing - if (log_normalizers(assignment) == 0.0) return gf; - - GaussianFactorGraph gfg; - gfg.push_back(jf); - - Vector c(1); - c << std::sqrt(log_normalizers(assignment)); - auto constantFactor = std::make_shared(c); - - gfg.push_back(constantFactor); - return std::dynamic_pointer_cast( - std::make_shared(gfg)); - }; - return factors.apply(update); -} - /* *******************************************************************************/ -GaussianMixtureFactor::GaussianMixtureFactor( - const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const Factors &factors, const AlgebraicDecisionTree &logNormalizers) - : Base(continuousKeys, discreteKeys), - factors_(augment(factors, logNormalizers)) {} +GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, + const DiscreteKeys &discreteKeys, + const Factors &factors) + : Base(continuousKeys, discreteKeys), factors_(factors) {} /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 6680abb54..4570268f1 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -82,14 +82,10 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * their cardinalities. * @param factors The decision tree of Gaussian factors stored as the mixture * density. - * @param logNormalizers Tree of log-normalizers corresponding to each - * Gaussian factor in factors. 
*/ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const Factors &factors, - const AlgebraicDecisionTree &logNormalizers = - AlgebraicDecisionTree(0.0)); + const Factors &factors); /** * @brief Construct a new GaussianMixtureFactor object using a vector of @@ -98,16 +94,12 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of gaussian factor shared pointers. - * @param logNormalizers Tree of log-normalizers corresponding to each - * Gaussian factor in factors. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const std::vector &factors, - const AlgebraicDecisionTree &logNormalizers = - AlgebraicDecisionTree(0.0)) + const std::vector &factors) : GaussianMixtureFactor(continuousKeys, discreteKeys, - Factors(discreteKeys, factors), logNormalizers) {} + Factors(discreteKeys, factors)) {} /// @} /// @name Testable From 91ccbebf224c52063dd43ffb69ecb7cef889cfbd Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 22 Aug 2024 14:44:54 -0400 Subject: [PATCH 104/398] remove ComputeLogNormalizer function --- gtsam/hybrid/GaussianMixtureFactor.cpp | 16 ---------------- gtsam/hybrid/GaussianMixtureFactor.h | 12 ------------ 2 files changed, 28 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index b4ee89cb0..8471cef33 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -120,20 +120,4 @@ double GaussianMixtureFactor::error(const HybridValues &values) const { return gf->error(values.continuous()); } -/* *******************************************************************************/ -double ComputeLogNormalizer( - const noiseModel::Gaussian::shared_ptr &noise_model) { - // Since noise models are Gaussian, we can get the logDeterminant using 
- // the same trick as in GaussianConditional - double logDetR = noise_model->R() - .diagonal() - .unaryExpr([](double x) { return log(x); }) - .sum(); - double logDeterminantSigma = -2.0 * logDetR; - - size_t n = noise_model->dim(); - constexpr double log2pi = 1.8378770664093454835606594728112; - return n * log2pi + logDeterminantSigma; -} - } // namespace gtsam diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 4570268f1..b1be6ef1d 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -171,16 +171,4 @@ template <> struct traits : public Testable { }; -/** - * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values - * for a Gaussian noise model. - * We compute this in the log-space for numerical accuracy. - * - * @param noise_model The Gaussian noise model - * whose normalizer we wish to compute. - * @return double - */ -GTSAM_EXPORT double ComputeLogNormalizer( - const noiseModel::Gaussian::shared_ptr &noise_model); - } // namespace gtsam From 07a00885139c91121b93c2e9a4d5cfed4e016e2f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 22 Aug 2024 20:32:14 -0400 Subject: [PATCH 105/398] compute logNormalizers and pass to GaussianMixtureFactor --- gtsam/hybrid/GaussianMixture.cpp | 35 +++++++------ gtsam/hybrid/GaussianMixtureFactor.cpp | 68 ++++++++++++++++++++++++-- gtsam/hybrid/GaussianMixtureFactor.h | 14 ++++-- 3 files changed, 94 insertions(+), 23 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 0a0332af8..6c92f0252 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -200,24 +200,27 @@ std::shared_ptr GaussianMixture::likelihood( const GaussianMixtureFactor::Factors likelihoods( conditionals_, [&](const GaussianConditional::shared_ptr &conditional) { const auto likelihood_m = conditional->likelihood(given); - const double Cgm_Kgcm = - logConstant_ - 
conditional->logNormalizationConstant(); - if (Cgm_Kgcm == 0.0) { - return likelihood_m; - } else { - // Add a constant factor to the likelihood in case the noise models - // are not all equal. - GaussianFactorGraph gfg; - gfg.push_back(likelihood_m); - Vector c(1); - c << std::sqrt(2.0 * Cgm_Kgcm); - auto constantFactor = std::make_shared(c); - gfg.push_back(constantFactor); - return std::make_shared(gfg); - } + return likelihood_m; }); + + // First compute all the sqrt(|2 pi Sigma|) terms + auto computeLogNormalizers = [](const GaussianFactor::shared_ptr &gf) { + auto jf = std::dynamic_pointer_cast(gf); + // If we have, say, a Hessian factor, then no need to do anything + if (!jf) return 0.0; + + auto model = jf->get_model(); + // If there is no noise model, there is nothing to do. + if (!model) { + return 0.0; + } + return ComputeLogNormalizer(model); + }; + + AlgebraicDecisionTree log_normalizers = + DecisionTree(likelihoods, computeLogNormalizers); return std::make_shared( - continuousParentKeys, discreteParentKeys, likelihoods); + continuousParentKeys, discreteParentKeys, likelihoods, log_normalizers); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 8471cef33..0427eef7b 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -28,11 +28,55 @@ namespace gtsam { +/** + * @brief Helper function to augment the [A|b] matrices in the factor components + * with the normalizer values. + * This is done by storing the normalizer value in + * the `b` vector as an additional row. + * + * @param factors DecisionTree of GaussianFactor shared pointers. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. 
+ * @return GaussianMixtureFactor::Factors + */ +GaussianMixtureFactor::Factors augment( + const GaussianMixtureFactor::Factors &factors, + const AlgebraicDecisionTree &logNormalizers) { + // Find the minimum value so we can "proselytize" to positive values. + // Done because we can't have sqrt of negative numbers. + double min_log_normalizer = logNormalizers.min(); + AlgebraicDecisionTree log_normalizers = logNormalizers.apply( + [&min_log_normalizer](double n) { return n - min_log_normalizer; }); + + // Finally, update the [A|b] matrices. + auto update = [&log_normalizers]( + const Assignment &assignment, + const GaussianMixtureFactor::sharedFactor &gf) { + auto jf = std::dynamic_pointer_cast(gf); + if (!jf) return gf; + // If the log_normalizer is 0, do nothing + if (log_normalizers(assignment) == 0.0) return gf; + + GaussianFactorGraph gfg; + gfg.push_back(jf); + + Vector c(1); + c << std::sqrt(log_normalizers(assignment)); + auto constantFactor = std::make_shared(c); + + gfg.push_back(constantFactor); + return std::dynamic_pointer_cast( + std::make_shared(gfg)); + }; + return factors.apply(update); +} + /* *******************************************************************************/ -GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const Factors &factors) - : Base(continuousKeys, discreteKeys), factors_(factors) {} +GaussianMixtureFactor::GaussianMixtureFactor( + const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, + const Factors &factors, const AlgebraicDecisionTree &logNormalizers) + : Base(continuousKeys, discreteKeys), + factors_(augment(factors, logNormalizers)) {} /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { @@ -120,4 +164,20 @@ double GaussianMixtureFactor::error(const HybridValues &values) const { return gf->error(values.continuous()); } +/* 
*******************************************************************************/ +double ComputeLogNormalizer( + const noiseModel::Gaussian::shared_ptr &noise_model) { + // Since noise models are Gaussian, we can get the logDeterminant using + // the same trick as in GaussianConditional + double logDetR = noise_model->R() + .diagonal() + .unaryExpr([](double x) { return log(x); }) + .sum(); + double logDeterminantSigma = -2.0 * logDetR; + + size_t n = noise_model->dim(); + constexpr double log2pi = 1.8378770664093454835606594728112; + return n * log2pi + logDeterminantSigma; +} + } // namespace gtsam diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 4570268f1..6680abb54 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -82,10 +82,14 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * their cardinalities. * @param factors The decision tree of Gaussian factors stored as the mixture * density. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const Factors &factors); + const Factors &factors, + const AlgebraicDecisionTree &logNormalizers = + AlgebraicDecisionTree(0.0)); /** * @brief Construct a new GaussianMixtureFactor object using a vector of @@ -94,12 +98,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of gaussian factor shared pointers. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. 
*/ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const std::vector &factors) + const std::vector &factors, + const AlgebraicDecisionTree &logNormalizers = + AlgebraicDecisionTree(0.0)) : GaussianMixtureFactor(continuousKeys, discreteKeys, - Factors(discreteKeys, factors)) {} + Factors(discreteKeys, factors), logNormalizers) {} /// @} /// @name Testable From 7765314638432c267eaf1631c61786c3b55f79d6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 24 Aug 2024 05:23:52 -0400 Subject: [PATCH 106/398] fix return type of EliminateQR --- gtsam/linear/linear.i | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/gtsam/linear/linear.i b/gtsam/linear/linear.i index 3a629f349..ed19eaa3b 100644 --- a/gtsam/linear/linear.i +++ b/gtsam/linear/linear.i @@ -375,7 +375,8 @@ virtual class JacobianFactor : gtsam::GaussianFactor { void serialize() const; }; -pair EliminateQR(const gtsam::GaussianFactorGraph& factors, const gtsam::Ordering& keys); +pair EliminateQR( + const gtsam::GaussianFactorGraph& factors, const gtsam::Ordering& keys); #include virtual class HessianFactor : gtsam::GaussianFactor { From 9d27ce8186c1023e9e84f0476d14b6a63953ff59 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 24 Aug 2024 05:29:02 -0400 Subject: [PATCH 107/398] remove unused code in HybridBayesNet --- gtsam/hybrid/HybridBayesNet.cpp | 1 + gtsam/hybrid/HybridBayesNet.h | 10 ---------- 2 files changed, 1 insertion(+), 10 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 362174514..b02967555 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -377,4 +377,5 @@ HybridGaussianFactorGraph HybridBayesNet::toFactorGraph( } return fg; } + } // namespace gtsam diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 55eaf6b5e..032cd55b9 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -252,14 +252,4 @@ 
class GTSAM_EXPORT HybridBayesNet : public BayesNet { template <> struct traits : public Testable {}; -/** - * @brief Add a Gaussian conditional to each node of the GaussianBayesNetTree - * - * @param gbnTree - * @param factor - * @return GaussianBayesNetTree - */ -GaussianBayesNetTree addGaussian(const GaussianBayesNetTree &gbnTree, - const GaussianConditional::shared_ptr &factor); - } // namespace gtsam From 0145a93d66db0ad1a64fd25f954eaee05592ea1d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 24 Aug 2024 05:32:47 -0400 Subject: [PATCH 108/398] more code cleanup --- gtsam/hybrid/HybridBayesTree.cpp | 11 +- gtsam/hybrid/HybridBayesTree.h | 4 +- gtsam/hybrid/tests/testMixtureFactor.cpp | 215 ----------------------- 3 files changed, 8 insertions(+), 222 deletions(-) diff --git a/gtsam/hybrid/HybridBayesTree.cpp b/gtsam/hybrid/HybridBayesTree.cpp index f08eff01b..da2645b5a 100644 --- a/gtsam/hybrid/HybridBayesTree.cpp +++ b/gtsam/hybrid/HybridBayesTree.cpp @@ -136,8 +136,7 @@ struct HybridAssignmentData { } }; -/* ************************************************************************* - */ +/* ************************************************************************* */ GaussianBayesTree HybridBayesTree::choose( const DiscreteValues& assignment) const { GaussianBayesTree gbt; @@ -157,8 +156,12 @@ GaussianBayesTree HybridBayesTree::choose( return gbt; } -/* ************************************************************************* - */ +/* ************************************************************************* */ +double HybridBayesTree::error(const HybridValues& values) const { + return HybridGaussianFactorGraph(*this).error(values); +} + +/* ************************************************************************* */ VectorValues HybridBayesTree::optimize(const DiscreteValues& assignment) const { GaussianBayesTree gbt = this->choose(assignment); // If empty GaussianBayesTree, means a clique is pruned hence invalid diff --git 
a/gtsam/hybrid/HybridBayesTree.h b/gtsam/hybrid/HybridBayesTree.h index af8eb3228..ab2d8a73d 100644 --- a/gtsam/hybrid/HybridBayesTree.h +++ b/gtsam/hybrid/HybridBayesTree.h @@ -85,9 +85,7 @@ class GTSAM_EXPORT HybridBayesTree : public BayesTree { GaussianBayesTree choose(const DiscreteValues& assignment) const; /** Error for all conditionals. */ - double error(const HybridValues& values) const { - return HybridGaussianFactorGraph(*this).error(values); - } + double error(const HybridValues& values) const; /** * @brief Optimize the hybrid Bayes tree by computing the MPE for the current diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index 52f56f62e..0b2564403 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -115,221 +115,6 @@ TEST(MixtureFactor, Dim) { EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); } -/* ************************************************************************* */ -// Test components with differing means -TEST(MixtureFactor, DifferentMeans) { - DiscreteKey m1(M(1), 2), m2(M(2), 2); - - Values values; - double x1 = 0.0, x2 = 1.75, x3 = 2.60; - values.insert(X(1), x1); - values.insert(X(2), x2); - values.insert(X(3), x3); - - auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0); - - auto f0 = std::make_shared>(X(1), X(2), 0.0, model0); - auto f1 = std::make_shared>(X(1), X(2), 2.0, model1); - std::vector factors{f0, f1}; - - MixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); - HybridNonlinearFactorGraph hnfg; - hnfg.push_back(mixtureFactor); - - f0 = std::make_shared>(X(2), X(3), 0.0, model0); - f1 = std::make_shared>(X(2), X(3), 2.0, model1); - std::vector factors23{f0, f1}; - hnfg.push_back(MixtureFactor({X(2), X(3)}, {m2}, factors23)); - - auto prior = PriorFactor(X(1), x1, prior_noise); - hnfg.push_back(prior); - - 
hnfg.emplace_shared>(X(2), 2.0, prior_noise); - - auto hgfg = hnfg.linearize(values); - auto bn = hgfg->eliminateSequential(); - HybridValues actual = bn->optimize(); - - HybridValues expected( - VectorValues{ - {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}}, - DiscreteValues{{M(1), 1}, {M(2), 0}}); - - EXPECT(assert_equal(expected, actual)); - - { - DiscreteValues dv{{M(1), 0}, {M(2), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 0}, {M(2), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}, {M(2), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}, {M(2), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); - } -} - -/* ************************************************************************* */ -// Test components with differing covariances -TEST(MixtureFactor, DifferentCovariances) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 1.0, x2 = 1.0; - values.insert(X(1), x1); - values.insert(X(2), x2); - - double between = 0.0; - - auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - - auto f0 = - std::make_shared>(X(1), X(2), between, model0); - auto f1 = - std::make_shared>(X(1), X(2), between, model1); - std::vector factors{f0, f1}; - - // Create via toFactorGraph - using symbol_shorthand::Z; - Matrix H0_1, H0_2, H1_1, H1_2; - Vector d0 = 
f0->evaluateError(x1, x2, &H0_1, &H0_2); - std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H0_1 /*Sp1*/}, - {X(2), H0_2 /*Tp2*/}}; - - Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); - std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H1_1 /*Sp1*/}, - {X(2), H1_2 /*Tp2*/}}; - auto gm = new gtsam::GaussianMixture( - {Z(1)}, {X(1), X(2)}, {m1}, - {std::make_shared(terms0, 1, -d0, model0), - std::make_shared(terms1, 1, -d1, model1)}); - gtsam::HybridBayesNet bn; - bn.emplace_back(gm); - - gtsam::VectorValues measurements; - measurements.insert(Z(1), gtsam::Z_1x1); - // Create FG with single GaussianMixtureFactor - HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); - - // Linearized prior factor on X1 - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - mixture_fg.push_back(prior); - - auto hbn = mixture_fg.eliminateSequential(); - - VectorValues cv; - cv.insert(X(1), Vector1(0.0)); - cv.insert(X(2), Vector1(0.0)); - - // Check that we get different error values at the MLE point μ. 
- AlgebraicDecisionTree errorTree = hbn->errorTree(cv); - - HybridValues hv0(cv, DiscreteValues{{M(1), 0}}); - HybridValues hv1(cv, DiscreteValues{{M(1), 1}}); - - auto cond0 = hbn->at(0)->asMixture(); - auto cond1 = hbn->at(1)->asMixture(); - auto discrete_cond = hbn->at(2)->asDiscrete(); - AlgebraicDecisionTree expectedErrorTree(m1, 9.90348755254, - 0.69314718056); - EXPECT(assert_equal(expectedErrorTree, errorTree)); -} - -/* ************************************************************************* */ -// Test components with differing means and covariances -TEST(MixtureFactor, DifferentMeansAndCovariances) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 0.0, x2 = 7.0; - values.insert(X(1), x1); - - double between = 0.0; - - auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - - auto f0 = - std::make_shared>(X(1), X(2), between, model0); - auto f1 = - std::make_shared>(X(1), X(2), between, model1); - std::vector factors{f0, f1}; - - // Create via toFactorGraph - using symbol_shorthand::Z; - Matrix H0_1, H0_2, H1_1, H1_2; - Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); - std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H0_1 /*Sp1*/}, - {X(2), H0_2 /*Tp2*/}}; - - Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); - std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H1_1 /*Sp1*/}, - {X(2), H1_2 /*Tp2*/}}; - auto gm = new gtsam::GaussianMixture( - {Z(1)}, {X(1), X(2)}, {m1}, - {std::make_shared(terms0, 1, -d0, model0), - std::make_shared(terms1, 1, -d1, model1)}); - gtsam::HybridBayesNet bn; - bn.emplace_back(gm); - - gtsam::VectorValues measurements; - measurements.insert(Z(1), gtsam::Z_1x1); - // Create FG with single GaussianMixtureFactor - HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); - - // Linearized prior factor on X1 - auto prior = PriorFactor(X(1), x1, 
prior_noise).linearize(values); - mixture_fg.push_back(prior); - - VectorValues vv{{X(1), x1 * I_1x1}, {X(2), x2 * I_1x1}}; - - auto hbn = mixture_fg.eliminateSequential(); - - HybridValues actual_values = hbn->optimize(); - - VectorValues cv; - cv.insert(X(1), Vector1(0.0)); - cv.insert(X(2), Vector1(-7.0)); - - // The first value is chosen as the tiebreaker - DiscreteValues dv; - dv.insert({M(1), 0}); - HybridValues expected_values(cv, dv); - - EXPECT(assert_equal(expected_values, actual_values)); -} - /* ************************************************************************* */ int main() { TestResult tr; From 5cf1e0d220534ddcf712cc6ca09308e9658ba20f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 24 Aug 2024 06:19:00 -0400 Subject: [PATCH 109/398] move NonlinearOptimizerParams::equals definition to .cpp file and improve slightly --- gtsam/nonlinear/NonlinearOptimizerParams.cpp | 22 ++++++++++++++++++++ gtsam/nonlinear/NonlinearOptimizerParams.h | 19 +---------------- 2 files changed, 23 insertions(+), 18 deletions(-) diff --git a/gtsam/nonlinear/NonlinearOptimizerParams.cpp b/gtsam/nonlinear/NonlinearOptimizerParams.cpp index 671dbe34d..800db02a0 100644 --- a/gtsam/nonlinear/NonlinearOptimizerParams.cpp +++ b/gtsam/nonlinear/NonlinearOptimizerParams.cpp @@ -123,6 +123,28 @@ void NonlinearOptimizerParams::print(const std::string& str) const { std::cout.flush(); } +/* ************************************************************************* */ +bool NonlinearOptimizerParams::equals(const NonlinearOptimizerParams& other, + double tol) const { + // Check for equality of shared ptrs + bool iterative_params_equal = iterativeParams == other.iterativeParams; + // Check equality of components + if (iterativeParams && other.iterativeParams) { + iterative_params_equal = iterativeParams->equals(*other.iterativeParams); + } else { + // If one or both shared pointers are null, we can't assume they are equal + iterative_params_equal = false; + } + + return 
maxIterations == other.getMaxIterations() && + std::abs(relativeErrorTol - other.getRelativeErrorTol()) <= tol && + std::abs(absoluteErrorTol - other.getAbsoluteErrorTol()) <= tol && + std::abs(errorTol - other.getErrorTol()) <= tol && + verbosityTranslator(verbosity) == other.getVerbosity() && + orderingType == other.orderingType && ordering == other.ordering && + linearSolverType == other.linearSolverType && iterative_params_equal; +} + /* ************************************************************************* */ std::string NonlinearOptimizerParams::linearSolverTranslator( LinearSolverType linearSolverType) const { diff --git a/gtsam/nonlinear/NonlinearOptimizerParams.h b/gtsam/nonlinear/NonlinearOptimizerParams.h index 92e22e064..0b4d8b2fd 100644 --- a/gtsam/nonlinear/NonlinearOptimizerParams.h +++ b/gtsam/nonlinear/NonlinearOptimizerParams.h @@ -114,24 +114,7 @@ public: virtual void print(const std::string& str = "") const; - bool equals(const NonlinearOptimizerParams& other, double tol = 1e-9) const { - // Check for equality of shared ptrs - bool iterative_params_equal = false; - if (iterativeParams == other.iterativeParams) { - iterative_params_equal = true; - } - if (iterativeParams && other.iterativeParams) { - iterative_params_equal = iterativeParams->equals(*other.iterativeParams); - } - - return maxIterations == other.getMaxIterations() && - std::abs(relativeErrorTol - other.getRelativeErrorTol()) <= tol && - std::abs(absoluteErrorTol - other.getAbsoluteErrorTol()) <= tol && - std::abs(errorTol - other.getErrorTol()) <= tol && - verbosityTranslator(verbosity) == other.getVerbosity() && - orderingType == other.orderingType && ordering == other.ordering && - linearSolverType == other.linearSolverType && iterative_params_equal; - } + bool equals(const NonlinearOptimizerParams& other, double tol = 1e-9); inline bool isMultifrontal() const { return (linearSolverType == MULTIFRONTAL_CHOLESKY) From 019dad976eae0bb9dcff152dea1bc64030415325 Mon Sep 17 00:00:00 
2001 From: Varun Agrawal Date: Sat, 24 Aug 2024 06:33:28 -0400 Subject: [PATCH 110/398] fix test --- gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 751e84d91..1dcd0bbb1 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -675,6 +675,8 @@ factor 6: P( m1 | m0 ): size: 3 conditional 0: Hybrid P( x0 | x1 m0) Discrete Keys = (m0, 2), + logNormalizationConstant: 1.38862 + Choice(m0) 0 Leaf p(x0 | x1) R = [ 10.0499 ] @@ -692,6 +694,8 @@ conditional 0: Hybrid P( x0 | x1 m0) conditional 1: Hybrid P( x1 | x2 m0 m1) Discrete Keys = (m0, 2), (m1, 2), + logNormalizationConstant: 1.3935 + Choice(m1) 0 Choice(m0) 0 0 Leaf p(x1 | x2) @@ -725,6 +729,8 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) conditional 2: Hybrid P( x2 | m0 m1) Discrete Keys = (m0, 2), (m1, 2), + logNormalizationConstant: 1.38857 + Choice(m1) 0 Choice(m0) 0 0 Leaf p(x2) From fa33bca0c7ffd240014cd878e2175662a22d4dd4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 24 Aug 2024 06:35:17 -0400 Subject: [PATCH 111/398] make equals const-qualified --- gtsam/nonlinear/NonlinearOptimizerParams.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/nonlinear/NonlinearOptimizerParams.h b/gtsam/nonlinear/NonlinearOptimizerParams.h index 0b4d8b2fd..cef7e85e3 100644 --- a/gtsam/nonlinear/NonlinearOptimizerParams.h +++ b/gtsam/nonlinear/NonlinearOptimizerParams.h @@ -114,7 +114,7 @@ public: virtual void print(const std::string& str = "") const; - bool equals(const NonlinearOptimizerParams& other, double tol = 1e-9); + bool equals(const NonlinearOptimizerParams& other, double tol = 1e-9) const; inline bool isMultifrontal() const { return (linearSolverType == MULTIFRONTAL_CHOLESKY) From c59f458facb10c224383995ebaefeb0fa6883a4a Mon Sep 17 00:00:00 
2001 From: Varun Agrawal Date: Sat, 24 Aug 2024 15:32:18 -0400 Subject: [PATCH 112/398] improved check --- gtsam/nonlinear/NonlinearOptimizerParams.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/nonlinear/NonlinearOptimizerParams.cpp b/gtsam/nonlinear/NonlinearOptimizerParams.cpp index 800db02a0..55dfd4561 100644 --- a/gtsam/nonlinear/NonlinearOptimizerParams.cpp +++ b/gtsam/nonlinear/NonlinearOptimizerParams.cpp @@ -132,8 +132,8 @@ bool NonlinearOptimizerParams::equals(const NonlinearOptimizerParams& other, if (iterativeParams && other.iterativeParams) { iterative_params_equal = iterativeParams->equals(*other.iterativeParams); } else { - // If one or both shared pointers are null, we can't assume they are equal - iterative_params_equal = false; + // Check if either is null. If both are null, then true + iterative_params_equal = !iterativeParams && !other.iterativeParams; } return maxIterations == other.getMaxIterations() && From 1aa5883964da8bc036151c563f4f51dc51a26ef0 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 24 Aug 2024 17:18:55 -0700 Subject: [PATCH 113/398] Upgrade action --- .github/workflows/build-linux.yml | 2 +- .github/workflows/build-macos.yml | 2 +- .github/workflows/build-python.yml | 2 +- .github/workflows/build-special.yml | 2 +- .github/workflows/build-windows.yml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build-linux.yml b/.github/workflows/build-linux.yml index 4502dbe0e..da398ad23 100644 --- a/.github/workflows/build-linux.yml +++ b/.github/workflows/build-linux.yml @@ -56,7 +56,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Dependencies run: | diff --git a/.github/workflows/build-macos.yml b/.github/workflows/build-macos.yml index 72e27e3b6..d75042ae8 100644 --- a/.github/workflows/build-macos.yml +++ b/.github/workflows/build-macos.yml @@ -38,7 +38,7 @@ jobs: steps: - name: Checkout - uses: 
actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Dependencies run: | diff --git a/.github/workflows/build-python.yml b/.github/workflows/build-python.yml index 037704a36..036717624 100644 --- a/.github/workflows/build-python.yml +++ b/.github/workflows/build-python.yml @@ -65,7 +65,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install (Linux) if: runner.os == 'Linux' diff --git a/.github/workflows/build-special.yml b/.github/workflows/build-special.yml index 164646e3e..3a7dd974d 100644 --- a/.github/workflows/build-special.yml +++ b/.github/workflows/build-special.yml @@ -83,7 +83,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install (Linux) if: runner.os == 'Linux' diff --git a/.github/workflows/build-windows.yml b/.github/workflows/build-windows.yml index dcf742c05..1bda7e40a 100644 --- a/.github/workflows/build-windows.yml +++ b/.github/workflows/build-windows.yml @@ -44,7 +44,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup msbuild uses: ilammy/msvc-dev-cmd@v1 From 3d8383089bb19e0f60ce1afa545bf6e27de1e70e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 24 Aug 2024 22:34:57 -0400 Subject: [PATCH 114/398] don't check if GaussianFactor has no contKeys since it may not (e.g. 
GaussianMixtureModel) --- gtsam/hybrid/GaussianMixtureFactor.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 8471cef33..94bc09407 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -66,7 +66,7 @@ void GaussianMixtureFactor::print(const std::string &s, [&](const sharedFactor &gf) -> std::string { RedirectCout rd; std::cout << ":\n"; - if (gf && !gf->empty()) { + if (gf) { gf->print("", formatter); return rd.str(); } else { From 1f373fd136e1ac46a3e06b1f568e3bff1737989f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 00:48:32 -0400 Subject: [PATCH 115/398] add examples of Hybrid FGs from HBNs as unit tests --- .../tests/testGaussianMixtureFactor.cpp | 233 +++++++++++++++++- 1 file changed, 229 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index dfafb923b..0910d2f40 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -200,6 +200,228 @@ TEST(GaussianMixtureFactor, Error) { 4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9); } +/* ************************************************************************* */ +/** + * Test a simple Gaussian Mixture Model represented as P(m)P(z|m) + * where m is a discrete variable and z is a continuous variable. + * m is binary and depending on m, we have 2 different means + * μ1 and μ2 for the Gaussian distribution around which we sample z. + * + * The resulting factor graph should eliminate to a Bayes net + * which represents a sigmoid function. 
+ */ +TEST(GaussianMixtureFactor, GaussianMixtureModel) { + double mu0 = 1.0, mu1 = 3.0; + double sigma = 2.0; + auto model = noiseModel::Isotropic::Sigma(1, sigma); + + DiscreteKey m(M(0), 2); + Key z = Z(0); + + auto c0 = make_shared(z, Vector1(mu0), I_1x1, model), + c1 = make_shared(z, Vector1(mu1), I_1x1, model); + + auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1}); + auto mixing = new DiscreteConditional(m, "0.5/0.5"); + + HybridBayesNet hbn; + hbn.emplace_back(gm); + hbn.emplace_back(mixing); + + // The result should be a sigmoid. + // So should be m = 0.5 at z=3.0 - 1.0=2.0 + VectorValues given; + given.insert(z, Vector1(mu1 - mu0)); + + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + HybridBayesNet expected; + expected.emplace_back(new DiscreteConditional(m, "0.5/0.5")); + + EXPECT(assert_equal(expected, *bn)); +} + +/* ************************************************************************* */ +/** + * Test a simple Gaussian Mixture Model represented as P(m)P(z|m) + * where m is a discrete variable and z is a continuous variable. + * m is binary and depending on m, we have 2 different means + * and covariances each for the + * Gaussian distribution around which we sample z. + * + * The resulting factor graph should eliminate to a Bayes net + * which represents a sigmoid function leaning towards + * the tighter covariance Gaussian. 
+ */ +TEST(GaussianMixtureFactor, GaussianMixtureModel2) { + double mu0 = 1.0, mu1 = 3.0; + auto model0 = noiseModel::Isotropic::Sigma(1, 8.0); + auto model1 = noiseModel::Isotropic::Sigma(1, 4.0); + + DiscreteKey m(M(0), 2); + Key z = Z(0); + + auto c0 = make_shared(z, Vector1(mu0), I_1x1, model0), + c1 = make_shared(z, Vector1(mu1), I_1x1, model1); + + auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1}); + auto mixing = new DiscreteConditional(m, "0.5/0.5"); + + HybridBayesNet hbn; + hbn.emplace_back(gm); + hbn.emplace_back(mixing); + + // The result should be a sigmoid leaning towards model1 + // since it has the tighter covariance. + // So should be m = 0.34/0.66 at z=3.0 - 1.0=2.0 + VectorValues given; + given.insert(z, Vector1(mu1 - mu0)); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + HybridBayesNet expected; + expected.emplace_back( + new DiscreteConditional(m, "0.338561851224/0.661438148776")); + + EXPECT(assert_equal(expected, *bn)); +} + +/* ************************************************************************* */ +/** + * Test a model P(x0)P(z0|x0)p(x1|m1)p(z1|x1)p(m1). + * + * p(x1|m1) has different means and same covariance. + * + * Converting to a factor graph gives us + * P(x0)ϕ(x0)P(x1|m1)ϕ(x1)P(m1) + * + * If we only have a measurement on z0, then + * the probability of x1 should be 0.5/0.5. + * Getting a measurement on z1 gives use more information. 
+ */ +TEST(GaussianMixtureFactor, TwoStateModel) { + double mu0 = 1.0, mu1 = 3.0; + auto model = noiseModel::Isotropic::Sigma(1, 2.0); + + DiscreteKey m1(M(1), 2); + Key z0 = Z(0), z1 = Z(1), x0 = X(0), x1 = X(1); + + auto c0 = make_shared(x1, Vector1(mu0), I_1x1, model), + c1 = make_shared(x1, Vector1(mu1), I_1x1, model); + + auto p_x0 = new GaussianConditional(x0, Vector1(0.0), I_1x1, + noiseModel::Isotropic::Sigma(1, 1.0)); + auto p_z0x0 = new GaussianConditional(z0, Vector1(0.0), I_1x1, x0, -I_1x1, + noiseModel::Isotropic::Sigma(1, 1.0)); + auto p_x1m1 = new GaussianMixture({x1}, {}, {m1}, {c0, c1}); + auto p_z1x1 = new GaussianConditional(z1, Vector1(0.0), I_1x1, x1, -I_1x1, + noiseModel::Isotropic::Sigma(1, 3.0)); + auto p_m1 = new DiscreteConditional(m1, "0.5/0.5"); + + HybridBayesNet hbn; + hbn.emplace_back(p_x0); + hbn.emplace_back(p_z0x0); + hbn.emplace_back(p_x1m1); + hbn.emplace_back(p_m1); + + VectorValues given; + given.insert(z0, Vector1(0.5)); + + { + // Start with no measurement on x1, only on x0 + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Since no measurement on x1, we hedge our bets + DiscreteConditional expected(m1, "0.5/0.5"); + + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()))); + } + + { + // Now we add a measurement z1 on x1 + hbn.emplace_back(p_z1x1); + + given.insert(z1, Vector1(2.2)); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Since we have a measurement on z2, we get a definite result + DiscreteConditional expected(m1, "0.4923083/0.5076917"); + + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); + } +} + +/* ************************************************************************* */ +/** + * Test a model P(x0)P(z0|x0)p(x1|m1)p(z1|x1)p(m1). + * + * p(x1|m1) has different means and different covariances. 
+ * + * Converting to a factor graph gives us + * P(x0)ϕ(x0)P(x1|m1)ϕ(x1)P(m1) + * + * If we only have a measurement on z0, then + * the probability of x1 should be the ratio of covariances. + * Getting a measurement on z1 gives use more information. + */ +TEST(GaussianMixtureFactor, TwoStateModel2) { + double mu0 = 1.0, mu1 = 3.0; + auto model0 = noiseModel::Isotropic::Sigma(1, 6.0); + auto model1 = noiseModel::Isotropic::Sigma(1, 4.0); + + DiscreteKey m1(M(1), 2); + Key z0 = Z(0), z1 = Z(1), x0 = X(0), x1 = X(1); + + auto c0 = make_shared(x1, Vector1(mu0), I_1x1, model0), + c1 = make_shared(x1, Vector1(mu1), I_1x1, model1); + + auto p_x0 = new GaussianConditional(x0, Vector1(0.0), I_1x1, + noiseModel::Isotropic::Sigma(1, 1.0)); + auto p_z0x0 = new GaussianConditional(z0, Vector1(0.0), I_1x1, x0, -I_1x1, + noiseModel::Isotropic::Sigma(1, 1.0)); + auto p_x1m1 = new GaussianMixture({x1}, {}, {m1}, {c0, c1}); + auto p_z1x1 = new GaussianConditional(z1, Vector1(0.0), I_1x1, x1, -I_1x1, + noiseModel::Isotropic::Sigma(1, 3.0)); + auto p_m1 = new DiscreteConditional(m1, "0.5/0.5"); + + HybridBayesNet hbn; + hbn.emplace_back(p_x0); + hbn.emplace_back(p_z0x0); + hbn.emplace_back(p_x1m1); + hbn.emplace_back(p_m1); + + VectorValues given; + given.insert(z0, Vector1(0.5)); + + { + // Start with no measurement on x1, only on x0 + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Since no measurement on x1, we get the ratio of covariances. 
+ DiscreteConditional expected(m1, "0.6/0.4"); + + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()))); + } + + { + // Now we add a measurement z1 on x1 + hbn.emplace_back(p_z1x1); + + given.insert(z1, Vector1(2.2)); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Since we have a measurement on z2, we get a definite result + DiscreteConditional expected(m1, "0.52706646/0.47293354"); + + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); + } +} + /** * @brief Helper function to specify a Hybrid Bayes Net * {P(X1) P(Z1 | X1, X2, M1)} and convert it to a Hybrid Factor Graph @@ -271,11 +493,12 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( * * We specify a hybrid Bayes network P(Z | X, M) =p(X1)p(Z1 | X1, X2, M1), * which is then converted to a factor graph by specifying Z1. - * - * p(Z1 | X1, X2, M1) has 2 factors each for the binary mode m1, with only the - * means being different. + * This is a different case since now we have a hybrid factor + * with 2 continuous variables ϕ(x1, x2, m1). + * p(Z1 | X1, X2, M1) has 2 factors each for the binary + * mode m1, with only the means being different. */ -TEST(GaussianMixtureFactor, DifferentMeansHBN) { +TEST(GaussianMixtureFactor, DifferentMeans) { DiscreteKey m1(M(1), 2); Values values; @@ -355,6 +578,8 @@ TEST(GaussianMixtureFactor, DifferentMeansHBN) { /** * @brief Test components with differing covariances * but with a Bayes net P(Z|X, M) converted to a FG. + * Same as the DifferentMeans example but in this case, + * we keep the means the same and vary the covariances. 
*/ TEST(GaussianMixtureFactor, DifferentCovariances) { DiscreteKey m1(M(1), 2); From 7358effe038d0cb82c9b378a21c8afabfa0ea269 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 00:49:28 -0400 Subject: [PATCH 116/398] allow for GaussianMixtureFactors with 0 continuous keys --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index bf11a50fc..cb8ceed20 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -242,6 +242,18 @@ discreteElimination(const HybridGaussianFactorGraph &factors, for (auto &f : factors) { if (auto df = dynamic_pointer_cast(f)) { dfg.push_back(df); + } else if (auto gmf = dynamic_pointer_cast(f)) { + // Case where we have a GaussianMixtureFactor with no continuous keys. + // In this case, compute discrete probabilities. + auto probability = + [&](const GaussianFactor::shared_ptr &factor) -> double { + if (!factor) return 0.0; + return exp(-factor->error(VectorValues())); + }; + dfg.emplace_shared( + gmf->discreteKeys(), + DecisionTree(gmf->factors(), probability)); + } else if (auto orphan = dynamic_pointer_cast(f)) { // Ignore orphaned clique. // TODO(dellaert): is this correct? If so explain here. From 88c2ad55bea7d23148029f895cec4f453ca3b8ad Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 00:49:36 -0400 Subject: [PATCH 117/398] better docstring --- gtsam/hybrid/GaussianMixtureFactor.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index b1be6ef1d..2459af259 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -80,8 +80,8 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param continuousKeys A vector of keys representing continuous variables. 
* @param discreteKeys A vector of keys representing discrete variables and * their cardinalities. - * @param factors The decision tree of Gaussian factors stored as the mixture - * density. + * @param factors The decision tree of Gaussian factors stored + * as the mixture density. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, From 1725deff1bd4b696467cad06308444f301506bf3 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 01:50:57 -0400 Subject: [PATCH 118/398] better GaussianMixture printing --- gtsam/hybrid/GaussianMixture.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 729842485..888872f81 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -144,8 +144,9 @@ void GaussianMixture::print(const std::string &s, for (auto &dk : discreteKeys()) { std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), "; } - std::cout << "\n logNormalizationConstant: " << logConstant_ << std::endl; - std::cout << "\n"; + std::cout << std::endl + << " logNormalizationConstant: " << logConstant_ << std::endl + << std::endl; conditionals_.print( "", [&](Key k) { return formatter(k); }, [&](const GaussianConditional::shared_ptr &gf) -> std::string { From bf70087fff9d5454a311039375b3c8512d2d47d7 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 04:35:02 -0400 Subject: [PATCH 119/398] Make default PartialPriorFactor constructor public --- gtsam_unstable/slam/PartialPriorFactor.h | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/gtsam_unstable/slam/PartialPriorFactor.h b/gtsam_unstable/slam/PartialPriorFactor.h index b70fe8b19..7722e5d82 100644 --- a/gtsam_unstable/slam/PartialPriorFactor.h +++ b/gtsam_unstable/slam/PartialPriorFactor.h @@ -50,9 +50,6 @@ namespace gtsam { Vector prior_; ///< Measurement on tangent space parameters, in 
compressed form. std::vector indices_; ///< Indices of the measured tangent space parameters. - /** default constructor - only use for serialization */ - PartialPriorFactor() {} - /** * constructor with just minimum requirements for a factor - allows more * computation in the constructor. This should only be used by subclasses @@ -65,7 +62,8 @@ namespace gtsam { // Provide access to the Matrix& version of evaluateError: using Base::evaluateError; - ~PartialPriorFactor() override {} + /** default constructor - only use for serialization */ + PartialPriorFactor() {} /** Single Element Constructor: Prior on a single parameter at index 'idx' in the tangent vector.*/ PartialPriorFactor(Key key, size_t idx, double prior, const SharedNoiseModel& model) : @@ -85,6 +83,8 @@ namespace gtsam { assert(model->dim() == (size_t)prior.size()); } + ~PartialPriorFactor() override {} + /// @return a deep copy of this factor gtsam::NonlinearFactor::shared_ptr clone() const override { return std::static_pointer_cast( From 2e39fef721f8e1dd9a824d6c0e3fed55080e7bd8 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 04:35:16 -0400 Subject: [PATCH 120/398] wrap PartialPriorFactor --- gtsam_unstable/gtsam_unstable.i | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/gtsam_unstable/gtsam_unstable.i b/gtsam_unstable/gtsam_unstable.i index 8566934dd..3beb845fb 100644 --- a/gtsam_unstable/gtsam_unstable.i +++ b/gtsam_unstable/gtsam_unstable.i @@ -671,6 +671,21 @@ class AHRS { //void print(string s) const; }; +#include +template +virtual class PartialPriorFactor : gtsam::NoiseModelFactor { + PartialPriorFactor(gtsam::Key key, size_t idx, double prior, + const gtsam::noiseModel::Base* model); + PartialPriorFactor(gtsam::Key key, const std::vector& indices, + const gtsam::Vector& prior, + const gtsam::noiseModel::Base* model); + + // enabling serialization functionality + void serialize() const; + + const gtsam::Vector& prior(); +}; + // Tectonic SAM Factors 
#include From 95da15a61abac9b6bf66e25e0193841964d86870 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 04:35:33 -0400 Subject: [PATCH 121/398] allow stl binding for pybind11 in gtsam_unstable --- python/CMakeLists.txt | 2 ++ python/gtsam_unstable/gtsam_unstable.tpl | 1 + 2 files changed, 3 insertions(+) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index ba55ac2af..0c9dd6967 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -189,6 +189,8 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) gtsam::BinaryMeasurementsPoint3 gtsam::BinaryMeasurementsUnit3 gtsam::BinaryMeasurementsRot3 + gtsam::SimWall2DVector + gtsam::SimPolygon2DVector gtsam::CameraSetCal3_S2 gtsam::CameraSetCal3Bundler gtsam::CameraSetCal3Unified diff --git a/python/gtsam_unstable/gtsam_unstable.tpl b/python/gtsam_unstable/gtsam_unstable.tpl index b98872c46..006ca7fc8 100644 --- a/python/gtsam_unstable/gtsam_unstable.tpl +++ b/python/gtsam_unstable/gtsam_unstable.tpl @@ -9,6 +9,7 @@ #include #include +#include #include #include #include From b94ab31e1f058d3fac9596922a6bda01fb1423a5 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 04:36:00 -0400 Subject: [PATCH 122/398] localization example script which gives exact same result as C++ version --- .../examples/LocalizationExample.py | 68 +++++++++++++++++++ 1 file changed, 68 insertions(+) create mode 100644 python/gtsam_unstable/examples/LocalizationExample.py diff --git a/python/gtsam_unstable/examples/LocalizationExample.py b/python/gtsam_unstable/examples/LocalizationExample.py new file mode 100644 index 000000000..ad8266d6d --- /dev/null +++ b/python/gtsam_unstable/examples/LocalizationExample.py @@ -0,0 +1,68 @@ +""" +A simple 2D pose slam example with "GPS" measurements + - The robot moves forward 2 meter each iteration + - The robot initially faces along the X axis (horizontal, to the right in 2D) + - We have full odometry between pose + - We have "GPS-like" measurements implemented with a custom 
factor +""" +import numpy as np + +import gtsam +from gtsam import BetweenFactorPose2, Pose2, noiseModel +from gtsam_unstable import PartialPriorFactorPose2 + + +def main(): + # 1. Create a factor graph container and add factors to it. + graph = gtsam.NonlinearFactorGraph() + + # 2a. Add odometry factors + # For simplicity, we will use the same noise model for each odometry factor + odometryNoise = noiseModel.Diagonal.Sigmas(np.asarray([0.2, 0.2, 0.1])) + + # Create odometry (Between) factors between consecutive poses + graph.push_back( + BetweenFactorPose2(1, 2, Pose2(2.0, 0.0, 0.0), odometryNoise)) + graph.push_back( + BetweenFactorPose2(2, 3, Pose2(2.0, 0.0, 0.0), odometryNoise)) + + # 2b. Add "GPS-like" measurements + # We will use PartialPrior factor for this. + unaryNoise = noiseModel.Diagonal.Sigmas(np.array([0.1, + 0.1])) # 10cm std on x,y + + graph.push_back( + PartialPriorFactorPose2(1, [0, 1], np.asarray([0.0, 0.0]), unaryNoise)) + graph.push_back( + PartialPriorFactorPose2(2, [0, 1], np.asarray([2.0, 0.0]), unaryNoise)) + graph.push_back( + PartialPriorFactorPose2(3, [0, 1], np.asarray([4.0, 0.0]), unaryNoise)) + graph.print("\nFactor Graph:\n") + + # 3. Create the data structure to hold the initialEstimate estimate to the solution + # For illustrative purposes, these have been deliberately set to incorrect values + initialEstimate = gtsam.Values() + initialEstimate.insert(1, Pose2(0.5, 0.0, 0.2)) + initialEstimate.insert(2, Pose2(2.3, 0.1, -0.2)) + initialEstimate.insert(3, Pose2(4.1, 0.1, 0.1)) + initialEstimate.print("\nInitial Estimate:\n") + + # 4. Optimize using Levenberg-Marquardt optimization. The optimizer + # accepts an optional set of configuration parameters, controlling + # things like convergence criteria, the type of linear system solver + # to use, and the amount of information displayed during optimization. + # Here we will use the default set of parameters. See the + # documentation for the full set of parameters. 
+ optimizer = gtsam.LevenbergMarquardtOptimizer(graph, initialEstimate) + result = optimizer.optimize() + result.print("Final Result:\n") + + # 5. Calculate and print marginal covariances for all variables + marginals = gtsam.Marginals(graph, result) + print("x1 covariance:\n", marginals.marginalCovariance(1)) + print("x2 covariance:\n", marginals.marginalCovariance(2)) + print("x3 covariance:\n", marginals.marginalCovariance(3)) + + +if __name__ == "__main__": + main() From c57d5a0f7052b9a3b4c34b95b1d795bebe312632 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 07:23:31 -0400 Subject: [PATCH 123/398] Updated porting progress --- python/gtsam/examples/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/gtsam/examples/README.md b/python/gtsam/examples/README.md index 2a2c9f2ef..70f6cfcb0 100644 --- a/python/gtsam/examples/README.md +++ b/python/gtsam/examples/README.md @@ -17,7 +17,7 @@ | InverseKinematicsExampleExpressions.cpp | | | ISAM2Example_SmartFactor | | | ISAM2_SmartFactorStereo_IMU | | -| LocalizationExample | impossible? 
| +| LocalizationExample | :heavy_check_mark: | | METISOrderingExample | | | OdometryExample | :heavy_check_mark: | | PlanarSLAMExample | :heavy_check_mark: | From d20993f74015f0697e3a699659db001dcd3d8385 Mon Sep 17 00:00:00 2001 From: Fan Jiang Date: Sun, 25 Aug 2024 13:24:25 -0400 Subject: [PATCH 124/398] fix C++ version issue of CppUnitLite --- CppUnitLite/CMakeLists.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/CppUnitLite/CMakeLists.txt b/CppUnitLite/CMakeLists.txt index cbffa79d1..e3b2dfadb 100644 --- a/CppUnitLite/CMakeLists.txt +++ b/CppUnitLite/CMakeLists.txt @@ -6,6 +6,7 @@ file(GLOB cppunitlite_src "*.cpp") add_library(CppUnitLite STATIC ${cppunitlite_src} ${cppunitlite_headers}) list(APPEND GTSAM_EXPORTED_TARGETS CppUnitLite) set(GTSAM_EXPORTED_TARGETS "${GTSAM_EXPORTED_TARGETS}" PARENT_SCOPE) +target_compile_features(CppUnitLite PUBLIC ${GTSAM_COMPILE_FEATURES_PUBLIC}) gtsam_assign_source_folders("${cppunitlite_headers};${cppunitlite_src}") # MSVC project structure From b54ed7209eccca587bf8b77c33925cd5ecae0081 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 13:51:14 -0400 Subject: [PATCH 125/398] split HybridValues into .h and .cpp --- gtsam/hybrid/HybridValues.cpp | 167 ++++++++++++++++++++++++++++++++++ gtsam/hybrid/HybridValues.h | 106 ++++++--------------- 2 files changed, 196 insertions(+), 77 deletions(-) create mode 100644 gtsam/hybrid/HybridValues.cpp diff --git a/gtsam/hybrid/HybridValues.cpp b/gtsam/hybrid/HybridValues.cpp new file mode 100644 index 000000000..1fb2a2adb --- /dev/null +++ b/gtsam/hybrid/HybridValues.cpp @@ -0,0 +1,167 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. 
(see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridValues.cpp + * @author Varun Agrawal + * @date August 2024 + */ + +#include +#include +#include +#include +#include + +namespace gtsam { + +/* ************************************************************************* */ +HybridValues::HybridValues(const VectorValues& cv, const DiscreteValues& dv) + : continuous_(cv), discrete_(dv) {} + +/* ************************************************************************* */ +HybridValues::HybridValues(const VectorValues& cv, const DiscreteValues& dv, + const Values& v) + : continuous_(cv), discrete_(dv), nonlinear_(v) {} + +/* ************************************************************************* */ +void HybridValues::print(const std::string& s, + const KeyFormatter& keyFormatter) const { + std::cout << s << ": \n"; + continuous_.print(" Continuous", + keyFormatter); // print continuous components + discrete_.print(" Discrete", keyFormatter); // print discrete components +} + +/* ************************************************************************* */ +bool HybridValues::equals(const HybridValues& other, double tol) const { + return continuous_.equals(other.continuous_, tol) && + discrete_.equals(other.discrete_, tol); +} + +/* ************************************************************************* */ +const VectorValues& HybridValues::continuous() const { return continuous_; } + +/* ************************************************************************* */ +const DiscreteValues& HybridValues::discrete() const { return discrete_; } + +/* ************************************************************************* */ +const Values& HybridValues::nonlinear() const { return nonlinear_; } + +/* ************************************************************************* */ +bool HybridValues::existsVector(Key j) { return 
continuous_.exists(j); } + +/* ************************************************************************* */ +bool HybridValues::existsDiscrete(Key j) { + return (discrete_.find(j) != discrete_.end()); +} + +/* ************************************************************************* */ +bool HybridValues::existsNonlinear(Key j) { return nonlinear_.exists(j); } + +/* ************************************************************************* */ +bool HybridValues::exists(Key j) { + return existsVector(j) || existsDiscrete(j) || existsNonlinear(j); +} + +/* ************************************************************************* */ +HybridValues HybridValues::retract(const VectorValues& delta) const { + HybridValues updated(continuous_, discrete_, nonlinear_.retract(delta)); + return updated; +} + +/* ************************************************************************* */ +void HybridValues::insert(Key j, const Vector& value) { + continuous_.insert(j, value); +} + +/* ************************************************************************* */ +void HybridValues::insert(Key j, size_t value) { discrete_[j] = value; } + +/* ************************************************************************* */ +void HybridValues::insert_or_assign(Key j, const Vector& value) { + continuous_.insert_or_assign(j, value); +} + +/* ************************************************************************* */ +void HybridValues::insert_or_assign(Key j, size_t value) { + discrete_[j] = value; +} + +/* ************************************************************************* */ +HybridValues& HybridValues::insert(const VectorValues& values) { + continuous_.insert(values); + return *this; +} + +/* ************************************************************************* */ +HybridValues& HybridValues::insert(const DiscreteValues& values) { + discrete_.insert(values); + return *this; +} + +/* ************************************************************************* */ +HybridValues& 
HybridValues::insert(const Values& values) { + nonlinear_.insert(values); + return *this; +} + +/* ************************************************************************* */ +HybridValues& HybridValues::insert(const HybridValues& values) { + continuous_.insert(values.continuous()); + discrete_.insert(values.discrete()); + nonlinear_.insert(values.nonlinear()); + return *this; +} + +/* ************************************************************************* */ +Vector& HybridValues::at(Key j) { return continuous_.at(j); } + +/* ************************************************************************* */ +size_t& HybridValues::atDiscrete(Key j) { return discrete_.at(j); } + +/* ************************************************************************* */ +HybridValues& HybridValues::update(const VectorValues& values) { + continuous_.update(values); + return *this; +} + +/* ************************************************************************* */ +HybridValues& HybridValues::update(const DiscreteValues& values) { + discrete_.update(values); + return *this; +} + +/* ************************************************************************* */ +HybridValues& HybridValues::update(const HybridValues& values) { + continuous_.update(values.continuous()); + discrete_.update(values.discrete()); + return *this; +} + +/* ************************************************************************* */ +VectorValues HybridValues::continuousSubset(const KeyVector& keys) const { + VectorValues measurements; + for (const auto& key : keys) { + measurements.insert(key, continuous_.at(key)); + } + return measurements; +} + +/* ************************************************************************* */ +std::string HybridValues::html(const KeyFormatter& keyFormatter) const { + std::stringstream ss; + ss << this->continuous_.html(keyFormatter); + ss << this->discrete_.html(keyFormatter); + return ss.str(); +} + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridValues.h 
b/gtsam/hybrid/HybridValues.h index 09e7d773c..07ff3e9e3 100644 --- a/gtsam/hybrid/HybridValues.h +++ b/gtsam/hybrid/HybridValues.h @@ -18,8 +18,6 @@ #pragma once -#include -#include #include #include #include @@ -55,13 +53,11 @@ class GTSAM_EXPORT HybridValues { HybridValues() = default; /// Construct from DiscreteValues and VectorValues. - HybridValues(const VectorValues& cv, const DiscreteValues& dv) - : continuous_(cv), discrete_(dv) {} + HybridValues(const VectorValues& cv, const DiscreteValues& dv); /// Construct from all values types. HybridValues(const VectorValues& cv, const DiscreteValues& dv, - const Values& v) - : continuous_(cv), discrete_(dv), nonlinear_(v) {} + const Values& v); /// @} /// @name Testable @@ -69,144 +65,105 @@ class GTSAM_EXPORT HybridValues { /// print required by Testable for unit testing void print(const std::string& s = "HybridValues", - const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { - std::cout << s << ": \n"; - continuous_.print(" Continuous", - keyFormatter); // print continuous components - discrete_.print(" Discrete", keyFormatter); // print discrete components - } + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; /// equals required by Testable for unit testing - bool equals(const HybridValues& other, double tol = 1e-9) const { - return continuous_.equals(other.continuous_, tol) && - discrete_.equals(other.discrete_, tol); - } + bool equals(const HybridValues& other, double tol = 1e-9) const; /// @} /// @name Interface /// @{ /// Return the multi-dimensional vector values. - const VectorValues& continuous() const { return continuous_; } + const VectorValues& continuous() const; /// Return the discrete values. - const DiscreteValues& discrete() const { return discrete_; } + const DiscreteValues& discrete() const; /// Return the nonlinear values. 
- const Values& nonlinear() const { return nonlinear_; } + const Values& nonlinear() const; /// Check whether a variable with key \c j exists in VectorValues. - bool existsVector(Key j) { return continuous_.exists(j); } + bool existsVector(Key j); /// Check whether a variable with key \c j exists in DiscreteValues. - bool existsDiscrete(Key j) { return (discrete_.find(j) != discrete_.end()); } + bool existsDiscrete(Key j); /// Check whether a variable with key \c j exists in values. - bool existsNonlinear(Key j) { return nonlinear_.exists(j); } + bool existsNonlinear(Key j); /// Check whether a variable with key \c j exists. - bool exists(Key j) { - return existsVector(j) || existsDiscrete(j) || existsNonlinear(j); - } + bool exists(Key j); + + /** Add a delta config to current config and returns a new config */ + HybridValues retract(const VectorValues& delta) const; /** Insert a vector \c value with key \c j. Throws an invalid_argument * exception if the key \c j is already used. * @param value The vector to be inserted. * @param j The index with which the value will be associated. */ - void insert(Key j, const Vector& value) { continuous_.insert(j, value); } + void insert(Key j, const Vector& value); /** Insert a discrete \c value with key \c j. Replaces the existing value if * the key \c j is already used. * @param value The vector to be inserted. * @param j The index with which the value will be associated. */ - void insert(Key j, size_t value) { discrete_[j] = value; } + void insert(Key j, size_t value); /// insert_or_assign() , similar to Values.h - void insert_or_assign(Key j, const Vector& value) { - continuous_.insert_or_assign(j, value); - } + void insert_or_assign(Key j, const Vector& value); /// insert_or_assign() , similar to Values.h - void insert_or_assign(Key j, size_t value) { discrete_[j] = value; } + void insert_or_assign(Key j, size_t value); /** Insert all continuous values from \c values. 
Throws an invalid_argument * exception if any keys to be inserted are already used. */ - HybridValues& insert(const VectorValues& values) { - continuous_.insert(values); - return *this; - } + HybridValues& insert(const VectorValues& values); /** Insert all discrete values from \c values. Throws an invalid_argument * exception if any keys to be inserted are already used. */ - HybridValues& insert(const DiscreteValues& values) { - discrete_.insert(values); - return *this; - } + HybridValues& insert(const DiscreteValues& values); /** Insert all values from \c values. Throws an invalid_argument * exception if any keys to be inserted are already used. */ - HybridValues& insert(const Values& values) { - nonlinear_.insert(values); - return *this; - } + HybridValues& insert(const Values& values); /** Insert all values from \c values. Throws an invalid_argument exception if * any keys to be inserted are already used. */ - HybridValues& insert(const HybridValues& values) { - continuous_.insert(values.continuous()); - discrete_.insert(values.discrete()); - nonlinear_.insert(values.nonlinear()); - return *this; - } + HybridValues& insert(const HybridValues& values); /** * Read/write access to the vector value with key \c j, throws * std::out_of_range if \c j does not exist. */ - Vector& at(Key j) { return continuous_.at(j); } + Vector& at(Key j); /** * Read/write access to the discrete value with key \c j, throws * std::out_of_range if \c j does not exist. */ - size_t& atDiscrete(Key j) { return discrete_.at(j); } + size_t& atDiscrete(Key j); /** For all key/value pairs in \c values, replace continuous values with * corresponding keys in this object with those in \c values. Throws * std::out_of_range if any keys in \c values are not present in this object. 
*/ - HybridValues& update(const VectorValues& values) { - continuous_.update(values); - return *this; - } + HybridValues& update(const VectorValues& values); /** For all key/value pairs in \c values, replace discrete values with * corresponding keys in this object with those in \c values. Throws * std::out_of_range if any keys in \c values are not present in this object. */ - HybridValues& update(const DiscreteValues& values) { - discrete_.update(values); - return *this; - } + HybridValues& update(const DiscreteValues& values); /** For all key/value pairs in \c values, replace all values with * corresponding keys in this object with those in \c values. Throws * std::out_of_range if any keys in \c values are not present in this object. */ - HybridValues& update(const HybridValues& values) { - continuous_.update(values.continuous()); - discrete_.update(values.discrete()); - return *this; - } + HybridValues& update(const HybridValues& values); /// Extract continuous values with given keys. - VectorValues continuousSubset(const KeyVector& keys) const { - VectorValues measurements; - for (const auto& key : keys) { - measurements.insert(key, continuous_.at(key)); - } - return measurements; - } + VectorValues continuousSubset(const KeyVector& keys) const; /// @} /// @name Wrapper support @@ -219,12 +176,7 @@ class GTSAM_EXPORT HybridValues { * @return string html output. 
*/ std::string html( - const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { - std::stringstream ss; - ss << this->continuous_.html(keyFormatter); - ss << this->discrete_.html(keyFormatter); - return ss.str(); - } + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; /// @} }; From 351f0bd3a569dcb9cdcb35f382b0a5d69f05a69b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 13:51:33 -0400 Subject: [PATCH 126/398] add improved versions of push_back for HybridBayesNet --- gtsam/hybrid/HybridBayesNet.h | 74 +++++++++++++++---- .../tests/testGaussianMixtureFactor.cpp | 21 +++--- 2 files changed, 70 insertions(+), 25 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 032cd55b9..891be75da 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -33,6 +33,18 @@ namespace gtsam { * @ingroup hybrid */ class GTSAM_EXPORT HybridBayesNet : public BayesNet { + template + struct is_shared_ptr : std::false_type {}; + template + struct is_shared_ptr> : std::true_type {}; + + /// Helper templates for checking if a type is a shared pointer or not + template + using IsSharedPtr = typename std::enable_if::value>::type; + template + using IsNotSharedPtr = + typename std::enable_if::value>::type; + public: using Base = BayesNet; using This = HybridBayesNet; @@ -70,20 +82,6 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { factors_.push_back(conditional); } - /** - * Preferred: add a conditional directly using a pointer. - * - * Examples: - * hbn.emplace_back(new GaussianMixture(...))); - * hbn.emplace_back(new GaussianConditional(...))); - * hbn.emplace_back(new DiscreteConditional(...))); - */ - template - void emplace_back(Conditional *conditional) { - factors_.push_back(std::make_shared( - std::shared_ptr(conditional))); - } - /** * Add a conditional using a shared_ptr, using implicit conversion to * a HybridConditional. 
@@ -101,6 +99,54 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { std::make_shared(std::move(conditional))); } + /** + * @brief Add a conditional to the Bayes net. + * Implicitly convert to a HybridConditional. + * + * E.g. + * hbn.push_back(std::make_shared(m, "1/1")); + * + * @tparam CONDITIONAL Type of conditional. This is shared_ptr version. + * @param conditional The conditional as a shared pointer. + * @return IsSharedPtr + */ + template + IsSharedPtr push_back(const CONDITIONAL &conditional) { + factors_.push_back(std::make_shared(conditional)); + } + + /** + * @brief Add a conditional to the Bayes net. + * Implicitly convert to a HybridConditional. + * + * E.g. + * hbn.push_back(DiscreteConditional(m, "1/1")); + * hbn.push_back(GaussianConditional(X(0), Vector1(0.0), I_1x1)); + * + * @tparam CONDITIONAL Type of conditional. This is const ref version. + * @param conditional The conditional as a const reference. + * @return IsSharedPtr + */ + template + IsNotSharedPtr push_back(const CONDITIONAL &conditional) { + auto cond_shared_ptr = std::make_shared(conditional); + push_back(cond_shared_ptr); + } + + /** + * Preferred: add a conditional directly using a pointer. + * + * Examples: + * hbn.emplace_back(new GaussianMixture(...))); + * hbn.emplace_back(new GaussianConditional(...))); + * hbn.emplace_back(new DiscreteConditional(...))); + */ + template + void emplace_back(Conditional *conditional) { + factors_.push_back(std::make_shared( + std::shared_ptr(conditional))); + } + /** * @brief Get the Gaussian Bayes Net which corresponds to a specific discrete * value assignment. 
diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 0910d2f40..a854f20d8 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -221,12 +221,12 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { auto c0 = make_shared(z, Vector1(mu0), I_1x1, model), c1 = make_shared(z, Vector1(mu1), I_1x1, model); - auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1}); - auto mixing = new DiscreteConditional(m, "0.5/0.5"); + GaussianMixture gm({z}, {}, {m}, {c0, c1}); + DiscreteConditional mixing(m, "0.5/0.5"); HybridBayesNet hbn; - hbn.emplace_back(gm); - hbn.emplace_back(mixing); + hbn.push_back(gm); + hbn.push_back(mixing); // The result should be a sigmoid. // So should be m = 0.5 at z=3.0 - 1.0=2.0 @@ -237,7 +237,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); HybridBayesNet expected; - expected.emplace_back(new DiscreteConditional(m, "0.5/0.5")); + expected.push_back(DiscreteConditional(m, "0.5/0.5")); EXPECT(assert_equal(expected, *bn)); } @@ -265,12 +265,12 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) { auto c0 = make_shared(z, Vector1(mu0), I_1x1, model0), c1 = make_shared(z, Vector1(mu1), I_1x1, model1); - auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1}); - auto mixing = new DiscreteConditional(m, "0.5/0.5"); + GaussianMixture gm({z}, {}, {m}, {c0, c1}); + DiscreteConditional mixing(m, "0.5/0.5"); HybridBayesNet hbn; - hbn.emplace_back(gm); - hbn.emplace_back(mixing); + hbn.push_back(gm); + hbn.push_back(mixing); // The result should be a sigmoid leaning towards model1 // since it has the tighter covariance. 
@@ -281,8 +281,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) { HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); HybridBayesNet expected; - expected.emplace_back( - new DiscreteConditional(m, "0.338561851224/0.661438148776")); + expected.push_back(DiscreteConditional(m, "0.338561851224/0.661438148776")); EXPECT(assert_equal(expected, *bn)); } From 353ff644fcdf51ac3c9e94f99b9e2574dd1fe686 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 24 Jul 2022 12:40:11 -0400 Subject: [PATCH 127/398] generate python type hints during pip build --- python/setup.py.in | 1 + 1 file changed, 1 insertion(+) diff --git a/python/setup.py.in b/python/setup.py.in index 824a6656e..b9d7392c7 100644 --- a/python/setup.py.in +++ b/python/setup.py.in @@ -13,6 +13,7 @@ package_data = { "./*.so", "./*.dll", "./*.pyd", + "*.pyi", "**/*.pyi", # Add the type hints ] } From 51aadbda3411411597d7f0d53bede212e898dc08 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 24 Jul 2022 22:25:56 -0400 Subject: [PATCH 128/398] add type hints and make command to test gtsam_unstable for python --- python/CMakeLists.txt | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index ba55ac2af..2068ee8dc 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -266,7 +266,9 @@ endif() # Add custom target so we can install with `make python-install` set(GTSAM_PYTHON_INSTALL_TARGET python-install) add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} - COMMAND ${PYTHON_EXECUTABLE} -m pip install --user . + COMMAND stubgen -q -p gtsam && + rsync -a "out/gtsam/" gtsam && + ${PYTHON_EXECUTABLE} -m pip install --user . DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}) @@ -279,3 +281,12 @@ add_custom_target( ${PYTHON_EXECUTABLE} -m unittest discover -v -s . 
DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam/tests") + +add_custom_target( + python-test-unstable + COMMAND + ${CMAKE_COMMAND} -E env # add package to python path so no need to install + "PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}" + ${PYTHON_EXECUTABLE} -m unittest discover -v -s . + DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} + WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam_unstable/tests") From 981ec728727c9e810b18d27c166763adaacdb7a5 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Aug 2024 11:00:39 -0700 Subject: [PATCH 129/398] Remove unused variable --- gtsam/discrete/tests/testDiscreteConditional.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/discrete/tests/testDiscreteConditional.cpp b/gtsam/discrete/tests/testDiscreteConditional.cpp index 172dd0fa1..2482a86a2 100644 --- a/gtsam/discrete/tests/testDiscreteConditional.cpp +++ b/gtsam/discrete/tests/testDiscreteConditional.cpp @@ -292,7 +292,7 @@ TEST(DiscreteConditional, choose) { /* ************************************************************************* */ // Check argmax on P(C|D) and P(D), plus tie-breaking for P(B) TEST(DiscreteConditional, Argmax) { - DiscreteKey B(2, 2), C(2, 2), D(4, 2); + DiscreteKey C(2, 2), D(4, 2); DiscreteConditional B_prior(D, "1/1"); DiscreteConditional D_prior(D, "1/3"); DiscreteConditional C_given_D((C | D) = "1/4 1/1"); From 48215b9063045921bba4268db36e3ac8085687e3 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 14:03:43 -0400 Subject: [PATCH 130/398] remove duplicate target --- python/CMakeLists.txt | 9 --------- 1 file changed, 9 deletions(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 2068ee8dc..20f2eab4b 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -281,12 +281,3 @@ add_custom_target( ${PYTHON_EXECUTABLE} -m unittest discover -v -s . 
DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam/tests") - -add_custom_target( - python-test-unstable - COMMAND - ${CMAKE_COMMAND} -E env # add package to python path so no need to install - "PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}" - ${PYTHON_EXECUTABLE} -m unittest discover -v -s . - DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} - WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam_unstable/tests") From e60477f3c7a65f163ce09002e7e4599c34e118dc Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Aug 2024 11:39:04 -0700 Subject: [PATCH 131/398] Add M1 runner --- .github/workflows/build-python.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/build-python.yml b/.github/workflows/build-python.yml index 036717624..1012c7582 100644 --- a/.github/workflows/build-python.yml +++ b/.github/workflows/build-python.yml @@ -32,6 +32,7 @@ jobs: ubuntu-20.04-gcc-9-tbb, ubuntu-20.04-clang-9, macos-12-xcode-14.2, + macos-14-xcode-15.4, windows-2022-msbuild, ] @@ -59,6 +60,11 @@ jobs: compiler: xcode version: "14.2" + - name: macos-14-xcode-15.4 + os: macos-14 + compiler: xcode + version: "15.4" + - name: windows-2022-msbuild os: windows-2022 platform: 64 From f2d69ed697b047e8fa5c89cc4613ffcee89c3c88 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 14:47:14 -0400 Subject: [PATCH 132/398] always use bundled pybind11 inside wrap --- python/CMakeLists.txt | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index ba55ac2af..12d8a9b4d 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -29,11 +29,8 @@ if(POLICY CMP0057) cmake_policy(SET CMP0057 NEW) endif() -# Prefer system pybind11 first, if not found, rely on bundled version: -find_package(pybind11 CONFIG QUIET) -if (NOT pybind11_FOUND) - 
add_subdirectory(${PROJECT_SOURCE_DIR}/wrap/pybind11 pybind11) -endif() +# Use bundled pybind11 version +find_package(pybind11 CONFIG QUIET PATHS "${PROJECT_SOURCE_DIR}/wrap/pybind11") # Set the wrapping script variable set(PYBIND_WRAP_SCRIPT "${PROJECT_SOURCE_DIR}/wrap/scripts/pybind_wrap.py") From 1c8c1f2e799b3365475e18d4ede89806e04373d6 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Aug 2024 11:55:06 -0700 Subject: [PATCH 133/398] Add venv on mac --- .github/workflows/build-python.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/build-python.yml b/.github/workflows/build-python.yml index 1012c7582..c5e63de9d 100644 --- a/.github/workflows/build-python.yml +++ b/.github/workflows/build-python.yml @@ -110,6 +110,10 @@ jobs: echo "CC=clang" >> $GITHUB_ENV echo "CXX=clang++" >> $GITHUB_ENV + python$PYTHON_VERSION -m venv venv + source venv/bin/activate + python -m pip install --upgrade pip + - name: Setup msbuild (Windows) if: runner.os == 'Windows' uses: ilammy/msvc-dev-cmd@v1 From c3503064c863815a80daf7b320168c4a0fa02949 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Aug 2024 12:07:47 -0700 Subject: [PATCH 134/398] Try activating venv --- .github/workflows/build-python.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build-python.yml b/.github/workflows/build-python.yml index c5e63de9d..54ea4485c 100644 --- a/.github/workflows/build-python.yml +++ b/.github/workflows/build-python.yml @@ -174,7 +174,11 @@ jobs: - name: Install Python Dependencies shell: bash - run: python$PYTHON_VERSION -m pip install -r python/dev_requirements.txt + run: | + if [ "${{ runner.os }}" == "macOS" ]; then + source venv/bin/activate + fi + python$PYTHON_VERSION -m pip install -r python/dev_requirements.txt - name: Build shell: bash From 387349839b6537ff0dd814ccf673cb747e219b36 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Aug 2024 12:22:08 -0700 Subject: [PATCH 135/398] Fix 
venv for all subsequent actions --- .github/workflows/build-python.yml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build-python.yml b/.github/workflows/build-python.yml index 54ea4485c..9a9d44f5c 100644 --- a/.github/workflows/build-python.yml +++ b/.github/workflows/build-python.yml @@ -112,6 +112,7 @@ jobs: python$PYTHON_VERSION -m venv venv source venv/bin/activate + echo "PATH=$(pwd)/venv/bin:$PATH" >> $GITHUB_ENV python -m pip install --upgrade pip - name: Setup msbuild (Windows) @@ -174,11 +175,7 @@ jobs: - name: Install Python Dependencies shell: bash - run: | - if [ "${{ runner.os }}" == "macOS" ]; then - source venv/bin/activate - fi - python$PYTHON_VERSION -m pip install -r python/dev_requirements.txt + run: python$PYTHON_VERSION -m pip install -r python/dev_requirements.txt - name: Build shell: bash From e58a5c4cac810db86c3f1a960e61db81446126b4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 15:41:55 -0400 Subject: [PATCH 136/398] directly add pybind11 subdirectory so files are generated correctly --- python/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 12d8a9b4d..b8c2c718f 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -30,7 +30,7 @@ if(POLICY CMP0057) endif() # Use bundled pybind11 version -find_package(pybind11 CONFIG QUIET PATHS "${PROJECT_SOURCE_DIR}/wrap/pybind11") +add_subdirectory(${PROJECT_SOURCE_DIR}/wrap/pybind11 pybind11) # Set the wrapping script variable set(PYBIND_WRAP_SCRIPT "${PROJECT_SOURCE_DIR}/wrap/scripts/pybind_wrap.py") From fad8e63fcea9fc074c843e365409e6461fbc29d0 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Aug 2024 13:02:37 -0700 Subject: [PATCH 137/398] try non-venv way --- .github/workflows/build-python.yml | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/.github/workflows/build-python.yml 
b/.github/workflows/build-python.yml index 9a9d44f5c..ed1b4cbdb 100644 --- a/.github/workflows/build-python.yml +++ b/.github/workflows/build-python.yml @@ -110,11 +110,6 @@ jobs: echo "CC=clang" >> $GITHUB_ENV echo "CXX=clang++" >> $GITHUB_ENV - python$PYTHON_VERSION -m venv venv - source venv/bin/activate - echo "PATH=$(pwd)/venv/bin:$PATH" >> $GITHUB_ENV - python -m pip install --upgrade pip - - name: Setup msbuild (Windows) if: runner.os == 'Windows' uses: ilammy/msvc-dev-cmd@v1 @@ -175,7 +170,7 @@ jobs: - name: Install Python Dependencies shell: bash - run: python$PYTHON_VERSION -m pip install -r python/dev_requirements.txt + run: python$PYTHON_VERSION -m pip install --break-system-packages --user -r python/dev_requirements.txt - name: Build shell: bash From 9dbbb328142c9b9bfa6c99255843fcbb1b0686a5 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Aug 2024 13:17:16 -0700 Subject: [PATCH 138/398] Add dangerous flag --- python/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index ba55ac2af..8fd092e8d 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -266,7 +266,7 @@ endif() # Add custom target so we can install with `make python-install` set(GTSAM_PYTHON_INSTALL_TARGET python-install) add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} - COMMAND ${PYTHON_EXECUTABLE} -m pip install --user . + COMMAND ${PYTHON_EXECUTABLE} -m pip install --break-system-packages --user . 
DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}) From b9e68ec79b0d0224464b33332efd5b818961018d Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Aug 2024 13:17:28 -0700 Subject: [PATCH 139/398] Add M1 build --- .github/workflows/build-macos.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/build-macos.yml b/.github/workflows/build-macos.yml index d75042ae8..e4c78bf67 100644 --- a/.github/workflows/build-macos.yml +++ b/.github/workflows/build-macos.yml @@ -26,6 +26,7 @@ jobs: # See https://help.github.com/en/articles/workflow-syntax-for-github-actions. name: [ macos-12-xcode-14.2, + macos-14-xcode-15.4, ] build_type: [Debug, Release] @@ -36,6 +37,11 @@ jobs: compiler: xcode version: "14.2" + - name: macos-14-xcode-15.4 + os: macos-14 + compiler: xcode + version: "15.4" + steps: - name: Checkout uses: actions/checkout@v4 From bde9285211b1882865a59f870eb3a8f1bb6d2395 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 16:42:23 -0400 Subject: [PATCH 140/398] better test documentation --- tests/testDoglegOptimizer.cpp | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/testDoglegOptimizer.cpp b/tests/testDoglegOptimizer.cpp index d7de63d3d..6fbc522d3 100644 --- a/tests/testDoglegOptimizer.cpp +++ b/tests/testDoglegOptimizer.cpp @@ -184,6 +184,14 @@ TEST(DoglegOptimizer, Constraint) { } /* ************************************************************************* */ +/** + * Test created to fix issue in ISAM2 when using the DogLegOptimizer. + * Originally reported by kvmanohar22 in issue #301 + * https://github.com/borglab/gtsam/issues/301 + * + * This test is based on a script provided by kvmanohar22 + * to help reproduce the issue. 
+ */ TEST(DogLegOptimizer, VariableUpdate) { // Make the typename short so it looks much cleaner typedef SmartProjectionPoseFactor SmartFactor; From 5f43b69c4371ca8d784378d11256f25412874a81 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 16:45:23 -0400 Subject: [PATCH 141/398] add mypy to the dev_requirements so we can get stubgen --- python/dev_requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/python/dev_requirements.txt b/python/dev_requirements.txt index 6970ee613..c76d3bf1b 100644 --- a/python/dev_requirements.txt +++ b/python/dev_requirements.txt @@ -1,2 +1,3 @@ -r requirements.txt -pyparsing>=2.4.2 \ No newline at end of file +pyparsing>=2.4.2 +mypy>=1.11.2 \ No newline at end of file From 9307536827bd44d7318cb42b2e873ce5dcca598d Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Aug 2024 13:49:43 -0700 Subject: [PATCH 142/398] Add venv on Mac and do not use --user flag in that case --- .github/workflows/build-python.yml | 10 +++++++++- python/CMakeLists.txt | 2 +- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-python.yml b/.github/workflows/build-python.yml index ed1b4cbdb..cb465309e 100644 --- a/.github/workflows/build-python.yml +++ b/.github/workflows/build-python.yml @@ -168,9 +168,17 @@ jobs: run: | bash .github/scripts/python.sh -d + - name: Create virtual on MacOS + if: runner.os == 'macOS' + run: | + python$PYTHON_VERSION -m venv venv + source venv/bin/activate + echo "PATH=$(pwd)/venv/bin:$PATH" >> $GITHUB_ENV + python -m pip install --upgrade pip + - name: Install Python Dependencies shell: bash - run: python$PYTHON_VERSION -m pip install --break-system-packages --user -r python/dev_requirements.txt + run: python$PYTHON_VERSION -m pip install -r python/dev_requirements.txt - name: Build shell: bash diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 8fd092e8d..e13103839 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt 
@@ -266,7 +266,7 @@ endif() # Add custom target so we can install with `make python-install` set(GTSAM_PYTHON_INSTALL_TARGET python-install) add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} - COMMAND ${PYTHON_EXECUTABLE} -m pip install --break-system-packages --user . + COMMAND ${PYTHON_EXECUTABLE} -m pip install $(if [ -z "$VIRTUAL_ENV" ]; then echo "--user"; fi) . DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}) From d1d6942bde767ec7521bccfa7d410f1da3323925 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Aug 2024 14:40:44 -0700 Subject: [PATCH 143/398] Correct cmake line --- python/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index e13103839..e908d4671 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -266,7 +266,7 @@ endif() # Add custom target so we can install with `make python-install` set(GTSAM_PYTHON_INSTALL_TARGET python-install) add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} - COMMAND ${PYTHON_EXECUTABLE} -m pip install $(if [ -z "$VIRTUAL_ENV" ]; then echo "--user"; fi) . + COMMAND ${PYTHON_EXECUTABLE} -m pip install `if [ -z "$VIRTUAL_ENV" ]; then echo --user; fi` . 
DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}) From 0ba23ccbaa0777c7f912b5c93ed25463e801eb1a Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Aug 2024 15:31:25 -0700 Subject: [PATCH 144/398] Try w cross-platform install --- python/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index e908d4671..2151557b6 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -266,7 +266,7 @@ endif() # Add custom target so we can install with `make python-install` set(GTSAM_PYTHON_INSTALL_TARGET python-install) add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} - COMMAND ${PYTHON_EXECUTABLE} -m pip install `if [ -z "$VIRTUAL_ENV" ]; then echo --user; fi` . + COMMAND ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; if not hasattr(sys, 'real_prefix') and not (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix): cmd.append('--user'); cmd.append('.'); subprocess.check_call(cmd)" DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}) From ce74b2b0c195c34dd8a1c6ef7705b0a2b1af3881 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Aug 2024 15:47:57 -0700 Subject: [PATCH 145/398] Elaborate solution that works on windows as well --- python/CMakeLists.txt | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 2151557b6..7e270e4e8 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -264,11 +264,13 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) endif() # Add custom target so we can install with `make python-install` +# Note below we make sure to install with --user iff not in a virtualenv set(GTSAM_PYTHON_INSTALL_TARGET python-install) add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} - COMMAND ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; 
if not hasattr(sys, 'real_prefix') and not (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix): cmd.append('--user'); cmd.append('.'); subprocess.check_call(cmd)" + COMMAND ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])" DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} - WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}) + WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY} + VERBATIM) # Custom make command to run all GTSAM Python tests add_custom_target( From 0e73367345f50f9c3a0a7f6e3beb7be124d99f23 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 18:57:03 -0400 Subject: [PATCH 146/398] stubgen only for Windows until we can find the rsync equivalent --- python/CMakeLists.txt | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 20f2eab4b..b32d51c68 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -265,12 +265,18 @@ endif() # Add custom target so we can install with `make python-install` set(GTSAM_PYTHON_INSTALL_TARGET python-install) -add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} - COMMAND stubgen -q -p gtsam && - rsync -a "out/gtsam/" gtsam && - ${PYTHON_EXECUTABLE} -m pip install --user . +if (NOT WIN32) + add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} + COMMAND stubgen -q -p gtsam && rsync -a \"out/gtsam/\" gtsam && ${PYTHON_EXECUTABLE} -m pip install --user . DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}) +elseif() + #TODO(Varun) Find rsync equivalent on Windows + add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} + COMMAND ${PYTHON_EXECUTABLE} -m pip install --user . 
+ DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} + WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}) +endif() # Custom make command to run all GTSAM Python tests add_custom_target( From bfcd5eb08a9feea9f84b079878396afae63e5b54 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 26 Aug 2024 05:48:18 -0400 Subject: [PATCH 147/398] use older version of mypy --- python/dev_requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/dev_requirements.txt b/python/dev_requirements.txt index c76d3bf1b..6a5e7f924 100644 --- a/python/dev_requirements.txt +++ b/python/dev_requirements.txt @@ -1,3 +1,3 @@ -r requirements.txt pyparsing>=2.4.2 -mypy>=1.11.2 \ No newline at end of file +mypy==1.4.1 #TODO(Varun) A bug in mypy>=1.5.0 causes an unresolved placeholder error when importing numpy>=2.0.0 (https://github.com/python/mypy/issues/17396) \ No newline at end of file From da99cf0e192ca4033d34575714b7591f343258ff Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 26 Aug 2024 06:25:17 -0400 Subject: [PATCH 148/398] remove extraneous variable --- python/CMakeLists.txt | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index b32d51c68..677ee72ce 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -264,15 +264,14 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) endif() # Add custom target so we can install with `make python-install` -set(GTSAM_PYTHON_INSTALL_TARGET python-install) if (NOT WIN32) - add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} + add_custom_target(python-install COMMAND stubgen -q -p gtsam && rsync -a \"out/gtsam/\" gtsam && ${PYTHON_EXECUTABLE} -m pip install --user . DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}) elseif() #TODO(Varun) Find rsync equivalent on Windows - add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} + add_custom_target(python-install COMMAND ${PYTHON_EXECUTABLE} -m pip install --user . 
DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}) From 48f9d0b11664b5e69e4f8cd925851e3206ecb5af Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 26 Aug 2024 09:44:28 -0400 Subject: [PATCH 149/398] fix if-else mistype --- python/CMakeLists.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 677ee72ce..1e4acf142 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -264,12 +264,12 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) endif() # Add custom target so we can install with `make python-install` -if (NOT WIN32) +if (NOT WIN32) # WIN32=1 is target platform is Windows add_custom_target(python-install COMMAND stubgen -q -p gtsam && rsync -a \"out/gtsam/\" gtsam && ${PYTHON_EXECUTABLE} -m pip install --user . DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}) -elseif() +else() #TODO(Varun) Find rsync equivalent on Windows add_custom_target(python-install COMMAND ${PYTHON_EXECUTABLE} -m pip install --user . From 1744c4aeb3b8875b8c17651905eebb4c7687d4ef Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 26 Aug 2024 10:07:24 -0400 Subject: [PATCH 150/398] save stubs to 'stubs' directory and use cp instead of rsync --- python/CMakeLists.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 1e4acf142..bdab178ba 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -266,11 +266,11 @@ endif() # Add custom target so we can install with `make python-install` if (NOT WIN32) # WIN32=1 is target platform is Windows add_custom_target(python-install - COMMAND stubgen -q -p gtsam && rsync -a \"out/gtsam/\" gtsam && ${PYTHON_EXECUTABLE} -m pip install --user . + COMMAND stubgen -q -p gtsam -o ./stubs && cp -a stubs/gtsam/ gtsam && ${PYTHON_EXECUTABLE} -m pip install --user . 
DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}) else() - #TODO(Varun) Find rsync equivalent on Windows + #TODO(Varun) Find equivalent cp on Windows add_custom_target(python-install COMMAND ${PYTHON_EXECUTABLE} -m pip install --user . DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} From f4830baa5e9e319259f17d31ae002e15904744b0 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 26 Aug 2024 17:27:44 -0400 Subject: [PATCH 151/398] common logProbability normalization function --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 43 ++++++++++++++++------ 1 file changed, 31 insertions(+), 12 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index cb8ceed20..bbf739c65 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -233,6 +233,26 @@ continuousElimination(const HybridGaussianFactorGraph &factors, return {std::make_shared(result.first), result.second}; } +/* ************************************************************************ */ +/** + * @brief Exponential log-probabilities after performing + * the necessary normalizations. + * + * @param logProbabilities DecisionTree of log-probabilities. 
+ * @return AlgebraicDecisionTree + */ +static AlgebraicDecisionTree exponentiateLogProbabilities( + const AlgebraicDecisionTree &logProbabilities) { + // Perform normalization + double max_log = logProbabilities.max(); + AlgebraicDecisionTree probabilities = DecisionTree( + logProbabilities, + [&max_log](const double x) { return exp(x - max_log); }); + probabilities = probabilities.normalize(probabilities.sum()); + + return probabilities; +} + /* ************************************************************************ */ static std::pair> discreteElimination(const HybridGaussianFactorGraph &factors, @@ -245,14 +265,18 @@ discreteElimination(const HybridGaussianFactorGraph &factors, } else if (auto gmf = dynamic_pointer_cast(f)) { // Case where we have a GaussianMixtureFactor with no continuous keys. // In this case, compute discrete probabilities. - auto probability = + auto logProbability = [&](const GaussianFactor::shared_ptr &factor) -> double { if (!factor) return 0.0; - return exp(-factor->error(VectorValues())); + return -factor->error(VectorValues()); }; - dfg.emplace_shared( - gmf->discreteKeys(), - DecisionTree(gmf->factors(), probability)); + AlgebraicDecisionTree logProbabilities = + DecisionTree(gmf->factors(), logProbability); + + AlgebraicDecisionTree probabilities = + exponentiateLogProbabilities(logProbabilities); + dfg.emplace_shared(gmf->discreteKeys(), + probabilities); } else if (auto orphan = dynamic_pointer_cast(f)) { // Ignore orphaned clique. 
@@ -315,13 +339,8 @@ static std::shared_ptr createDiscreteFactor( AlgebraicDecisionTree logProbabilities( DecisionTree(eliminationResults, logProbability)); - - // Perform normalization - double max_log = logProbabilities.max(); - AlgebraicDecisionTree probabilities = DecisionTree( - logProbabilities, - [&max_log](const double x) { return exp(x - max_log); }); - probabilities = probabilities.normalize(probabilities.sum()); + AlgebraicDecisionTree probabilities = + exponentiateLogProbabilities(logProbabilities); return std::make_shared(discreteSeparator, probabilities); } From 475b37f7d6aacf86e9624dd8ae97475396f581f0 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 26 Aug 2024 15:08:04 -0700 Subject: [PATCH 152/398] Fix tolerance for ubuntu quaternion cases --- gtsam/geometry/tests/testPose3.cpp | 2 +- gtsam/geometry/tests/testRot3.cpp | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/gtsam/geometry/tests/testPose3.cpp b/gtsam/geometry/tests/testPose3.cpp index 065d43bf9..c9851f761 100644 --- a/gtsam/geometry/tests/testPose3.cpp +++ b/gtsam/geometry/tests/testPose3.cpp @@ -1223,7 +1223,7 @@ TEST(Pose3, ExpmapChainRule) { // Test the derivatives at zero const Matrix6 expected = numericalDerivative11(g, Z_6x1); - EXPECT(assert_equal(expected, M)); // Pose3::ExpmapDerivative(Z_6x1) is identity + EXPECT(assert_equal(expected, M, 1e-5)); // Pose3::ExpmapDerivative(Z_6x1) is identity // Test the derivatives at another value const Vector6 delta{0.1, 0.2, 0.3, 0.4, 0.5, 0.6}; diff --git a/gtsam/geometry/tests/testRot3.cpp b/gtsam/geometry/tests/testRot3.cpp index 9555a2445..ce056edb6 100644 --- a/gtsam/geometry/tests/testRot3.cpp +++ b/gtsam/geometry/tests/testRot3.cpp @@ -958,7 +958,7 @@ TEST(Rot3, determinant) { /* ************************************************************************* */ TEST(Rot3, ExpmapChainRule) { - // Muliply with an arbitrary matrix and exponentiate + // Multiply with an arbitrary matrix and exponentiate 
Matrix3 M; M << 1, 2, 3, 4, 5, 6, 7, 8, 9; auto g = [&](const Vector3& omega) { @@ -967,17 +967,17 @@ TEST(Rot3, ExpmapChainRule) { // Test the derivatives at zero const Matrix3 expected = numericalDerivative11(g, Z_3x1); - EXPECT(assert_equal(expected, M)); // SO3::ExpmapDerivative(Z_3x1) is identity + EXPECT(assert_equal(expected, M, 1e-5)); // SO3::ExpmapDerivative(Z_3x1) is identity // Test the derivatives at another value const Vector3 delta{0.1,0.2,0.3}; const Matrix3 expected2 = numericalDerivative11(g, delta); - EXPECT(assert_equal(expected2, SO3::ExpmapDerivative(M*delta) * M)); + EXPECT(assert_equal(expected2, SO3::ExpmapDerivative(M*delta) * M, 1e-5)); } /* ************************************************************************* */ TEST(Rot3, expmapChainRule) { - // Muliply an arbitrary rotation with exp(M*x) + // Multiply an arbitrary rotation with exp(M*x) // Perhaps counter-intuitively, this has the same derivatives as above Matrix3 M; M << 1, 2, 3, 4, 5, 6, 7, 8, 9; @@ -988,12 +988,12 @@ TEST(Rot3, expmapChainRule) { // Test the derivatives at zero const Matrix3 expected = numericalDerivative11(g, Z_3x1); - EXPECT(assert_equal(expected, M)); + EXPECT(assert_equal(expected, M, 1e-5)); // Test the derivatives at another value const Vector3 delta{0.1,0.2,0.3}; const Matrix3 expected2 = numericalDerivative11(g, delta); - EXPECT(assert_equal(expected2, SO3::ExpmapDerivative(M*delta) * M)); + EXPECT(assert_equal(expected2, SO3::ExpmapDerivative(M*delta) * M, 1e-5)); } /* ************************************************************************* */ From 8d3bdc0578c1694a6a2fbe4024d1a63d9052fb5d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 26 Aug 2024 19:47:09 -0400 Subject: [PATCH 153/398] python package generation with stubs working --- python/CMakeLists.txt | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 9bbde7023..43f8d78bd 100644 --- 
a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -265,24 +265,22 @@ endif() # Add custom target so we can install with `make python-install` # Note below we make sure to install with --user iff not in a virtualenv set(GTSAM_PYTHON_INSTALL_TARGET python-install) -add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} +#TODO(Varun) Maybe move the long command to script? +# https://stackoverflow.com/questions/49053544/how-do-i-run-a-python-script-every-time-in-a-cmake-build +if (NOT WIN32) # WIN32=1 is target platform is Windows + add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} + COMMAND stubgen -q -p gtsam && cp -a out/gtsam/ gtsam && ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])" + DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} + WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY} + VERBATIM) +else() + #TODO(Varun) Find equivalent cp on Windows + add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} COMMAND ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])" DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY} VERBATIM) - -# if (NOT WIN32) # WIN32=1 is target platform is Windows -# add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} -# COMMAND stubgen -q -p gtsam -o ./stubs && cp -a stubs/gtsam/ gtsam && ${PYTHON_EXECUTABLE} -m pip install --user . 
-# DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} -# WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}) -# else() -# #TODO(Varun) Find equivalent cp on Windows -# add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} -# COMMAND ${PYTHON_EXECUTABLE} -m pip install --user . -# DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} -# WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}) -# endif() +endif() # Custom make command to run all GTSAM Python tests From e81272b078ea36f0e6cca1f77d28fc3102afba2f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 27 Aug 2024 09:55:05 -0400 Subject: [PATCH 154/398] fix comments --- gtsam/hybrid/tests/testGaussianMixtureFactor.cpp | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 0910d2f40..4c293c2b9 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -251,8 +251,8 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { * Gaussian distribution around which we sample z. * * The resulting factor graph should eliminate to a Bayes net - * which represents a sigmoid function leaning towards - * the tighter covariance Gaussian. + * which represents a Gaussian-like function + * where m1>m0 close to 3.1333. */ TEST(GaussianMixtureFactor, GaussianMixtureModel2) { double mu0 = 1.0, mu1 = 3.0; @@ -272,17 +272,16 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) { hbn.emplace_back(gm); hbn.emplace_back(mixing); - // The result should be a sigmoid leaning towards model1 - // since it has the tighter covariance. - // So should be m = 0.34/0.66 at z=3.0 - 1.0=2.0 + // The result should be a bell curve like function + // with m1 > m0 close to 3.1333. 
VectorValues given; - given.insert(z, Vector1(mu1 - mu0)); + given.insert(z, Vector1(3.133)); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); HybridBayesNet expected; expected.emplace_back( - new DiscreteConditional(m, "0.338561851224/0.661438148776")); + new DiscreteConditional(m, "0.325603277954/0.674396722046")); EXPECT(assert_equal(expected, *bn)); } From 25a5f810703ed7f601c1a72152614d07e239f58c Mon Sep 17 00:00:00 2001 From: Joel Truher Date: Wed, 28 Aug 2024 08:18:53 -0700 Subject: [PATCH 155/398] add example CameraResectioning.py --- python/gtsam/examples/CameraResectioning.py | 85 +++++++++++++++++++++ 1 file changed, 85 insertions(+) create mode 100644 python/gtsam/examples/CameraResectioning.py diff --git a/python/gtsam/examples/CameraResectioning.py b/python/gtsam/examples/CameraResectioning.py new file mode 100644 index 000000000..e962b40bb --- /dev/null +++ b/python/gtsam/examples/CameraResectioning.py @@ -0,0 +1,85 @@ +# pylint: disable=consider-using-from-import,invalid-name,no-name-in-module,no-member,missing-function-docstring +""" +This is a 1:1 transcription of CameraResectioning.cpp. 
+""" +import numpy as np +from gtsam import Cal3_S2, CustomFactor, LevenbergMarquardtOptimizer, KeyVector +from gtsam import NonlinearFactor, NonlinearFactorGraph +from gtsam import PinholeCameraCal3_S2, Point2, Point3, Pose3, Rot3, Values +from gtsam.noiseModel import Base as SharedNoiseModel, Diagonal +from gtsam.symbol_shorthand import X + + +def resectioning_factor( + model: SharedNoiseModel, + key: int, + calib: Cal3_S2, + p: Point2, + P: Point3, +) -> NonlinearFactor: + + def error_func(this: CustomFactor, v: Values, H: list[np.ndarray]) -> np.ndarray: + pose = v.atPose3(this.keys()[0]) + camera = PinholeCameraCal3_S2(pose, calib) + if H is None: + return camera.project(P) - p + Dpose = np.zeros((2, 6), order="F") + Dpoint = np.zeros((2, 3), order="F") + Dcal = np.zeros((2, 5), order="F") + result = camera.project(P, Dpose, Dpoint, Dcal) - p + H[0] = Dpose + return result + + return CustomFactor(model, KeyVector([key]), error_func) + + +def main() -> None: + """ + Camera: f = 1, Image: 100x100, center: 50, 50.0 + Pose (ground truth): (Xw, -Yw, -Zw, [0,0,2.0]') + Known landmarks: + 3D Points: (10,10,0) (-10,10,0) (-10,-10,0) (10,-10,0) + Perfect measurements: + 2D Point: (55,45) (45,45) (45,55) (55,55) + """ + + # read camera intrinsic parameters + calib = Cal3_S2(1, 1, 0, 50, 50) + + # 1. create graph + graph = NonlinearFactorGraph() + + # 2. add factors to the graph + measurement_noise = Diagonal.Sigmas(np.array([0.5, 0.5])) + graph.add( + resectioning_factor( + measurement_noise, X(1), calib, Point2(55, 45), Point3(10, 10, 0) + ) + ) + graph.add( + resectioning_factor( + measurement_noise, X(1), calib, Point2(45, 45), Point3(-10, 10, 0) + ) + ) + graph.add( + resectioning_factor( + measurement_noise, X(1), calib, Point2(45, 55), Point3(-10, -10, 0) + ) + ) + graph.add( + resectioning_factor( + measurement_noise, X(1), calib, Point2(55, 55), Point3(10, -10, 0) + ) + ) + + # 3. 
Create an initial estimate for the camera pose + initial: Values = Values() + initial.insert(X(1), Pose3(Rot3(1, 0, 0, 0, -1, 0, 0, 0, -1), Point3(0, 0, 1))) + + # 4. Optimize the graph using Levenberg-Marquardt + result: Values = LevenbergMarquardtOptimizer(graph, initial).optimize() + result.print("Final result:\n") + + +if __name__ == "__main__": + main() From 3dab868ef0cf2e8c8093366d1afaf06ea19d8635 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 28 Aug 2024 12:47:24 -0400 Subject: [PATCH 156/398] rename from exponentiateLogProbabilities to probabilitiesFromLogValues --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index bbf739c65..2c5d239f5 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -235,19 +235,18 @@ continuousElimination(const HybridGaussianFactorGraph &factors, /* ************************************************************************ */ /** - * @brief Exponential log-probabilities after performing - * the necessary normalizations. + * @brief Exponentiate log-values, not necessarily normalized, normalize, and + * return as AlgebraicDecisionTree. * - * @param logProbabilities DecisionTree of log-probabilities. + * @param logValues DecisionTree of (unnormalized) log values. 
* @return AlgebraicDecisionTree */ -static AlgebraicDecisionTree exponentiateLogProbabilities( - const AlgebraicDecisionTree &logProbabilities) { +static AlgebraicDecisionTree probabilitiesFromLogValues( + const AlgebraicDecisionTree &logValues) { // Perform normalization - double max_log = logProbabilities.max(); + double max_log = logValues.max(); AlgebraicDecisionTree probabilities = DecisionTree( - logProbabilities, - [&max_log](const double x) { return exp(x - max_log); }); + logValues, [&max_log](const double x) { return exp(x - max_log); }); probabilities = probabilities.normalize(probabilities.sum()); return probabilities; @@ -274,7 +273,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors, DecisionTree(gmf->factors(), logProbability); AlgebraicDecisionTree probabilities = - exponentiateLogProbabilities(logProbabilities); + probabilitiesFromLogValues(logProbabilities); dfg.emplace_shared(gmf->discreteKeys(), probabilities); @@ -340,7 +339,7 @@ static std::shared_ptr createDiscreteFactor( AlgebraicDecisionTree logProbabilities( DecisionTree(eliminationResults, logProbability)); AlgebraicDecisionTree probabilities = - exponentiateLogProbabilities(logProbabilities); + probabilitiesFromLogValues(logProbabilities); return std::make_shared(discreteSeparator, probabilities); } From 088f1f04bb6a0e27c1dfa9feacefd3bda05cc6ba Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 28 Aug 2024 13:18:55 -0400 Subject: [PATCH 157/398] improve the gaussian mixture model tests --- .../tests/testGaussianMixtureFactor.cpp | 113 +++++++++++++----- 1 file changed, 85 insertions(+), 28 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 4c293c2b9..4d333f2c3 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -200,6 +200,42 @@ TEST(GaussianMixtureFactor, Error) { 4.0, mixtureFactor.error({continuousValues, 
discreteValues}), 1e-9); } +namespace test_gmm { + +/** + * Function to compute P(m=1|z). For P(m=0|z), swap `mus and sigmas. + * Follows equation 7.108 since it is more generic. + */ +double sigmoid(double mu0, double mu1, double sigma0, double sigma1, double z) { + double x1 = ((z - mu0) / sigma0), x2 = ((z - mu1) / sigma1); + double d = std::sqrt(sigma0 * sigma0) / std::sqrt(sigma1 * sigma1); + double e = d * std::exp(-0.5 * (x1 * x1 - x2 * x2)); + return 1 / (1 + e); +}; + +HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, double sigma0, + double sigma1) { + DiscreteKey m(M(0), 2); + Key z = Z(0); + + auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); + auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); + + auto c0 = make_shared(z, Vector1(mu0), I_1x1, model0), + c1 = make_shared(z, Vector1(mu1), I_1x1, model1); + + auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1}); + auto mixing = new DiscreteConditional(m, "0.5/0.5"); + + HybridBayesNet hbn; + hbn.emplace_back(gm); + hbn.emplace_back(mixing); + + return hbn; +} + +} // namespace test_gmm + /* ************************************************************************* */ /** * Test a simple Gaussian Mixture Model represented as P(m)P(z|m) @@ -211,6 +247,8 @@ TEST(GaussianMixtureFactor, Error) { * which represents a sigmoid function. 
*/ TEST(GaussianMixtureFactor, GaussianMixtureModel) { + using namespace test_gmm; + double mu0 = 1.0, mu1 = 3.0; double sigma = 2.0; auto model = noiseModel::Isotropic::Sigma(1, sigma); @@ -218,28 +256,52 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { DiscreteKey m(M(0), 2); Key z = Z(0); - auto c0 = make_shared(z, Vector1(mu0), I_1x1, model), - c1 = make_shared(z, Vector1(mu1), I_1x1, model); - - auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1}); - auto mixing = new DiscreteConditional(m, "0.5/0.5"); - - HybridBayesNet hbn; - hbn.emplace_back(gm); - hbn.emplace_back(mixing); + auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma, sigma); // The result should be a sigmoid. - // So should be m = 0.5 at z=3.0 - 1.0=2.0 - VectorValues given; - given.insert(z, Vector1(mu1 - mu0)); + // So should be P(m=1|z) = 0.5 at z=3.0 - 1.0=2.0 + double midway = mu1 - mu0, lambda = 4; + { + VectorValues given; + given.insert(z, Vector1(midway)); - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - HybridBayesNet expected; - expected.emplace_back(new DiscreteConditional(m, "0.5/0.5")); + EXPECT_DOUBLES_EQUAL( + sigmoid(mu0, mu1, sigma, sigma, midway), + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8); - EXPECT(assert_equal(expected, *bn)); + // At the halfway point between the means, we should get P(m|z)=0.5 + HybridBayesNet expected; + expected.emplace_back(new DiscreteConditional(m, "0.5/0.5")); + + EXPECT(assert_equal(expected, *bn)); + } + { + // Shift by -lambda + VectorValues given; + given.insert(z, Vector1(midway - lambda)); + + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + EXPECT_DOUBLES_EQUAL( + sigmoid(mu0, mu1, sigma, sigma, midway - lambda), + 
bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8); + } + { + // Shift by lambda + VectorValues given; + given.insert(z, Vector1(midway + lambda)); + + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + EXPECT_DOUBLES_EQUAL( + sigmoid(mu0, mu1, sigma, sigma, midway + lambda), + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8); + } } /* ************************************************************************* */ @@ -255,30 +317,25 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { * where m1>m0 close to 3.1333. */ TEST(GaussianMixtureFactor, GaussianMixtureModel2) { + using namespace test_gmm; + double mu0 = 1.0, mu1 = 3.0; - auto model0 = noiseModel::Isotropic::Sigma(1, 8.0); - auto model1 = noiseModel::Isotropic::Sigma(1, 4.0); + double sigma0 = 8.0, sigma1 = 4.0; DiscreteKey m(M(0), 2); Key z = Z(0); - auto c0 = make_shared(z, Vector1(mu0), I_1x1, model0), - c1 = make_shared(z, Vector1(mu1), I_1x1, model1); - - auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1}); - auto mixing = new DiscreteConditional(m, "0.5/0.5"); - - HybridBayesNet hbn; - hbn.emplace_back(gm); - hbn.emplace_back(mixing); + auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma0, sigma1); // The result should be a bell curve like function // with m1 > m0 close to 3.1333. + // We get 3.1333 by finding the maximum value of the function. 
VectorValues given; given.insert(z, Vector1(3.133)); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + // regression HybridBayesNet expected; expected.emplace_back( new DiscreteConditional(m, "0.325603277954/0.674396722046")); From bb77b0cabbbd9ceb433d80c62950d4cf509bc979 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 28 Aug 2024 17:55:49 -0400 Subject: [PATCH 158/398] improved two state model with different means --- .../tests/testGaussianMixtureFactor.cpp | 118 ++++++++++++------ 1 file changed, 83 insertions(+), 35 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 4d333f2c3..8b7b5c146 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -36,6 +36,7 @@ using namespace std; using namespace gtsam; using noiseModel::Isotropic; +using symbol_shorthand::F; using symbol_shorthand::M; using symbol_shorthand::X; using symbol_shorthand::Z; @@ -270,7 +271,8 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { EXPECT_DOUBLES_EQUAL( sigmoid(mu0, mu1, sigma, sigma, midway), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8); + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), + 1e-8); // At the halfway point between the means, we should get P(m|z)=0.5 HybridBayesNet expected; @@ -288,7 +290,8 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { EXPECT_DOUBLES_EQUAL( sigmoid(mu0, mu1, sigma, sigma, midway - lambda), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8); + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), + 1e-8); } { // Shift by lambda @@ -300,7 +303,8 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { EXPECT_DOUBLES_EQUAL( sigmoid(mu0, mu1, sigma, sigma, midway + lambda), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8); + 
bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), + 1e-8); } } @@ -343,81 +347,124 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) { EXPECT(assert_equal(expected, *bn)); } +namespace test_two_state_estimation { + +/// Create Two State Bayes Network with measurements +HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, + double sigma1, + bool add_second_measurement = false, + double prior_sigma = 1e-3, + double measurement_sigma = 3.0) { + DiscreteKey m1(M(1), 2); + Key z0 = Z(0), z1 = Z(1), f01 = F(0); + Key x0 = X(0), x1 = X(1); + + HybridBayesNet hbn; + + auto prior_model = noiseModel::Isotropic::Sigma(1, prior_sigma); + auto measurement_model = noiseModel::Isotropic::Sigma(1, measurement_sigma); + + // Set a prior P(x0) at x0=0 + hbn.emplace_back( + new GaussianConditional(x0, Vector1(0.0), I_1x1, prior_model)); + + // Add measurement P(z0 | x0) + auto p_z0 = new GaussianConditional(z0, Vector1(0.0), -I_1x1, x0, I_1x1, + measurement_model); + hbn.emplace_back(p_z0); + + // Add hybrid motion model + auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); + auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); + auto c0 = make_shared(f01, Vector1(mu0), I_1x1, x1, + I_1x1, x0, -I_1x1, model0), + c1 = make_shared(f01, Vector1(mu1), I_1x1, x1, + I_1x1, x0, -I_1x1, model1); + + auto motion = new GaussianMixture({f01}, {x0, x1}, {m1}, {c0, c1}); + + hbn.emplace_back(motion); + + if (add_second_measurement) { + // Add second measurement + auto p_z1 = new GaussianConditional(z1, Vector1(0.0), -I_1x1, x1, I_1x1, + measurement_model); + hbn.emplace_back(p_z1); + } + + // Discrete uniform prior. + auto p_m1 = new DiscreteConditional(m1, "0.5/0.5"); + hbn.emplace_back(p_m1); + + return hbn; +} + +} // namespace test_two_state_estimation + /* ************************************************************************* */ /** - * Test a model P(x0)P(z0|x0)p(x1|m1)p(z1|x1)p(m1). + * Test a model P(x0)P(z0|x0)P(f01|x1,x0,m1)P(z1|x1)P(m1). 
* - * p(x1|m1) has different means and same covariance. + * P(f01|x1,x0,m1) has different means and same covariance. * * Converting to a factor graph gives us - * P(x0)ϕ(x0)P(x1|m1)ϕ(x1)P(m1) + * P(x0)ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) * * If we only have a measurement on z0, then - * the probability of x1 should be 0.5/0.5. + * the probability of m1 should be 0.5/0.5. * Getting a measurement on z1 gives use more information. */ TEST(GaussianMixtureFactor, TwoStateModel) { + using namespace test_two_state_estimation; + double mu0 = 1.0, mu1 = 3.0; - auto model = noiseModel::Isotropic::Sigma(1, 2.0); + double sigma = 2.0; DiscreteKey m1(M(1), 2); - Key z0 = Z(0), z1 = Z(1), x0 = X(0), x1 = X(1); + Key z0 = Z(0), z1 = Z(1), f01 = F(0); - auto c0 = make_shared(x1, Vector1(mu0), I_1x1, model), - c1 = make_shared(x1, Vector1(mu1), I_1x1, model); - - auto p_x0 = new GaussianConditional(x0, Vector1(0.0), I_1x1, - noiseModel::Isotropic::Sigma(1, 1.0)); - auto p_z0x0 = new GaussianConditional(z0, Vector1(0.0), I_1x1, x0, -I_1x1, - noiseModel::Isotropic::Sigma(1, 1.0)); - auto p_x1m1 = new GaussianMixture({x1}, {}, {m1}, {c0, c1}); - auto p_z1x1 = new GaussianConditional(z1, Vector1(0.0), I_1x1, x1, -I_1x1, - noiseModel::Isotropic::Sigma(1, 3.0)); - auto p_m1 = new DiscreteConditional(m1, "0.5/0.5"); - - HybridBayesNet hbn; - hbn.emplace_back(p_x0); - hbn.emplace_back(p_z0x0); - hbn.emplace_back(p_x1m1); - hbn.emplace_back(p_m1); + // Start with no measurement on x1, only on x0 + HybridBayesNet hbn = CreateBayesNet(mu0, mu1, sigma, sigma, false); VectorValues given; given.insert(z0, Vector1(0.5)); + // The motion model says we didn't move + given.insert(f01, Vector1(0.0)); { - // Start with no measurement on x1, only on x0 HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); // Since no measurement on x1, we hedge our bets DiscreteConditional expected(m1, "0.5/0.5"); - + // regression EXPECT(assert_equal(expected, 
*(bn->at(2)->asDiscrete()))); } { // Now we add a measurement z1 on x1 - hbn.emplace_back(p_z1x1); + hbn = CreateBayesNet(mu0, mu1, sigma, sigma, true); + // If we see z1=2.2, discrete mode should say m1=1 given.insert(z1, Vector1(2.2)); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); // Since we have a measurement on z2, we get a definite result DiscreteConditional expected(m1, "0.4923083/0.5076917"); - + // regression EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); } } /* ************************************************************************* */ /** - * Test a model P(x0)P(z0|x0)p(x1|m1)p(z1|x1)p(m1). + * Test a model P(x0)P(z0|x0)P(f01|x1,x0,m1)P(z1|x1)P(m1). * - * p(x1|m1) has different means and different covariances. + * P(f01|x1,x0,m1) has different means and different covariances. * * Converting to a factor graph gives us - * P(x0)ϕ(x0)P(x1|m1)ϕ(x1)P(m1) + * P(x0)ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) * * If we only have a measurement on z0, then * the probability of x1 should be the ratio of covariances. 
@@ -425,8 +472,9 @@ TEST(GaussianMixtureFactor, TwoStateModel) { */ TEST(GaussianMixtureFactor, TwoStateModel2) { double mu0 = 1.0, mu1 = 3.0; - auto model0 = noiseModel::Isotropic::Sigma(1, 6.0); - auto model1 = noiseModel::Isotropic::Sigma(1, 4.0); + double sigma0 = 6.0, sigma1 = 4.0; + auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); + auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); DiscreteKey m1(M(1), 2); Key z0 = Z(0), z1 = Z(1), x0 = X(0), x1 = X(1); From 28f30a232d8b0fac02c4185bae2d1ece2600ddd2 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 28 Aug 2024 18:13:54 -0400 Subject: [PATCH 159/398] update up TwoStateModel test and remove DifferentMeans and DifferentCovariances for later --- .../tests/testGaussianMixtureFactor.cpp | 236 ++---------------- 1 file changed, 17 insertions(+), 219 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 8b7b5c146..aa2fe0a1d 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -428,7 +428,7 @@ TEST(GaussianMixtureFactor, TwoStateModel) { VectorValues given; given.insert(z0, Vector1(0.5)); - // The motion model says we didn't move + // The motion model measurement says we didn't move given.insert(f01, Vector1(0.0)); { @@ -467,258 +467,56 @@ TEST(GaussianMixtureFactor, TwoStateModel) { * P(x0)ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) * * If we only have a measurement on z0, then - * the probability of x1 should be the ratio of covariances. + * the P(m1) should be 0.5/0.5. * Getting a measurement on z1 gives use more information. 
*/ TEST(GaussianMixtureFactor, TwoStateModel2) { + using namespace test_two_state_estimation; + double mu0 = 1.0, mu1 = 3.0; double sigma0 = 6.0, sigma1 = 4.0; auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); DiscreteKey m1(M(1), 2); - Key z0 = Z(0), z1 = Z(1), x0 = X(0), x1 = X(1); + Key z0 = Z(0), z1 = Z(1), f01 = F(0); - auto c0 = make_shared(x1, Vector1(mu0), I_1x1, model0), - c1 = make_shared(x1, Vector1(mu1), I_1x1, model1); - - auto p_x0 = new GaussianConditional(x0, Vector1(0.0), I_1x1, - noiseModel::Isotropic::Sigma(1, 1.0)); - auto p_z0x0 = new GaussianConditional(z0, Vector1(0.0), I_1x1, x0, -I_1x1, - noiseModel::Isotropic::Sigma(1, 1.0)); - auto p_x1m1 = new GaussianMixture({x1}, {}, {m1}, {c0, c1}); - auto p_z1x1 = new GaussianConditional(z1, Vector1(0.0), I_1x1, x1, -I_1x1, - noiseModel::Isotropic::Sigma(1, 3.0)); - auto p_m1 = new DiscreteConditional(m1, "0.5/0.5"); - - HybridBayesNet hbn; - hbn.emplace_back(p_x0); - hbn.emplace_back(p_z0x0); - hbn.emplace_back(p_x1m1); - hbn.emplace_back(p_m1); + // Start with no measurement on x1, only on x0 + HybridBayesNet hbn = CreateBayesNet(mu0, mu1, sigma0, sigma1, false); VectorValues given; given.insert(z0, Vector1(0.5)); + // The motion model measurement says we didn't move + given.insert(f01, Vector1(0.0)); { // Start with no measurement on x1, only on x0 HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - // Since no measurement on x1, we get the ratio of covariances. 
- DiscreteConditional expected(m1, "0.6/0.4"); - - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()))); + // Since no measurement on x1, we a 50/50 probability + auto p_m = bn->at(2)->asDiscrete(); + EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()(DiscreteValues{{m1.first, 0}}), + 1e-9); + EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()(DiscreteValues{{m1.first, 1}}), + 1e-9); } { // Now we add a measurement z1 on x1 - hbn.emplace_back(p_z1x1); + hbn = CreateBayesNet(mu0, mu1, sigma0, sigma1, true); given.insert(z1, Vector1(2.2)); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); // Since we have a measurement on z2, we get a definite result - DiscreteConditional expected(m1, "0.52706646/0.47293354"); + DiscreteConditional expected(m1, "0.4262682/0.5737318"); EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); } } -/** - * @brief Helper function to specify a Hybrid Bayes Net - * {P(X1) P(Z1 | X1, X2, M1)} and convert it to a Hybrid Factor Graph - * {P(X1)L(X1, X2, M1; Z1)} by converting to likelihoods given Z1. - * - * We can specify either different means or different sigmas, - * or both for each hybrid factor component. - * - * @param values Initial values for linearization. - * @param means The mean values for the conditional components. - * @param sigmas Noise model sigma values (standard deviation). - * @param m1 The discrete mode key. - * @param z1 The measurement value. 
- * @return HybridGaussianFactorGraph - */ -HybridGaussianFactorGraph GetFactorGraphFromBayesNet( - const gtsam::Values &values, const std::vector &means, - const std::vector &sigmas, DiscreteKey &m1, double z1 = 0.0) { - // Noise models - auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); - auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - - // GaussianMixtureFactor component factors - auto f0 = - std::make_shared>(X(1), X(2), means[0], model0); - auto f1 = - std::make_shared>(X(1), X(2), means[1], model1); - std::vector factors{f0, f1}; - - /// Get terms for each p^m(z1 | x1, x2) - Matrix H0_1, H0_2, H1_1, H1_2; - double x1 = values.at(X(1)), x2 = values.at(X(2)); - Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); - std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H0_1 /*Sp1*/}, - {X(2), H0_2 /*Tp2*/}}; - - Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); - std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H1_1 /*Sp1*/}, - {X(2), H1_2 /*Tp2*/}}; - // Create conditional P(Z1 | X1, X2, M1) - auto gm = new gtsam::GaussianMixture( - {Z(1)}, {X(1), X(2)}, {m1}, - {std::make_shared(terms0, 1, -d0, model0), - std::make_shared(terms1, 1, -d1, model1)}); - gtsam::HybridBayesNet bn; - bn.emplace_back(gm); - // bn.print(); - - // Create FG via toFactorGraph - gtsam::VectorValues measurements; - measurements.insert(Z(1), gtsam::I_1x1 * z1); // Set Z1 = 0 - HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); - - // Linearized prior factor on X1 - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - mixture_fg.push_back(prior); - - return mixture_fg; -} - -/* ************************************************************************* */ -/** - * @brief Test components with differing means. 
- * - * We specify a hybrid Bayes network P(Z | X, M) =p(X1)p(Z1 | X1, X2, M1), - * which is then converted to a factor graph by specifying Z1. - * This is a different case since now we have a hybrid factor - * with 2 continuous variables ϕ(x1, x2, m1). - * p(Z1 | X1, X2, M1) has 2 factors each for the binary - * mode m1, with only the means being different. - */ -TEST(GaussianMixtureFactor, DifferentMeans) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 0.0, x2 = 1.75; - values.insert(X(1), x1); - values.insert(X(2), x2); - - // Different means, same sigma - std::vector means{0.0, 2.0}, sigmas{1e-0, 1e-0}; - - HybridGaussianFactorGraph hfg = - GetFactorGraphFromBayesNet(values, means, sigmas, m1, 0.0); - - { - // With no measurement on X2, each mode should be equally likely - auto bn = hfg.eliminateSequential(); - HybridValues actual = bn->optimize(); - - HybridValues expected( - VectorValues{{X(1), Vector1(0.0)}, {X(2), Vector1(-1.75)}}, - DiscreteValues{{M(1), 0}}); - - EXPECT(assert_equal(expected, actual)); - - { - DiscreteValues dv{{M(1), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); - } - } - { - // If we add a measurement on X2, we have more information to work with. 
- // Add a measurement on X2 - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - GaussianConditional meas_z2(Z(2), Vector1(2.0), I_1x1, X(2), I_1x1, - prior_noise); - auto prior_x2 = meas_z2.likelihood(Vector1(x2)); - - hfg.push_back(prior_x2); - - auto bn = hfg.eliminateSequential(); - HybridValues actual = bn->optimize(); - - HybridValues expected( - VectorValues{{X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}}, - DiscreteValues{{M(1), 1}}); - - EXPECT(assert_equal(expected, actual)); - - { - DiscreteValues dv{{M(1), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9); - } - } -} - -/* ************************************************************************* */ -/** - * @brief Test components with differing covariances - * but with a Bayes net P(Z|X, M) converted to a FG. - * Same as the DifferentMeans example but in this case, - * we keep the means the same and vary the covariances. - */ -TEST(GaussianMixtureFactor, DifferentCovariances) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 1.0, x2 = 1.0; - values.insert(X(1), x1); - values.insert(X(2), x2); - - std::vector means{0.0, 0.0}, sigmas{1e2, 1e-2}; - HybridGaussianFactorGraph mixture_fg = - GetFactorGraphFromBayesNet(values, means, sigmas, m1); - - auto hbn = mixture_fg.eliminateSequential(); - - VectorValues cv; - cv.insert(X(1), Vector1(0.0)); - cv.insert(X(2), Vector1(0.0)); - - // Check that the error values at the MLE point μ. 
- AlgebraicDecisionTree errorTree = hbn->errorTree(cv); - - DiscreteValues dv0{{M(1), 0}}; - DiscreteValues dv1{{M(1), 1}}; - - // regression - EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); - EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); - - DiscreteConditional expected_m1(m1, "0.5/0.5"); - DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); - - EXPECT(assert_equal(expected_m1, actual_m1)); -} - /* ************************************************************************* */ int main() { TestResult tr; From f8a7b804d3167196b7eed0f257a4e75545076ae8 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 29 Aug 2024 10:45:50 -0400 Subject: [PATCH 160/398] address some comments --- gtsam/hybrid/tests/testGaussianMixtureFactor.cpp | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index aa2fe0a1d..3d26ae573 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -204,12 +204,12 @@ TEST(GaussianMixtureFactor, Error) { namespace test_gmm { /** - * Function to compute P(m=1|z). For P(m=0|z), swap `mus and sigmas. + * Function to compute P(m=1|z). For P(m=0|z), swap mus and sigmas. * Follows equation 7.108 since it is more generic. 
*/ double sigmoid(double mu0, double mu1, double sigma0, double sigma1, double z) { double x1 = ((z - mu0) / sigma0), x2 = ((z - mu1) / sigma1); - double d = std::sqrt(sigma0 * sigma0) / std::sqrt(sigma1 * sigma1); + double d = sigma0 / sigma1; double e = d * std::exp(-0.5 * (x1 * x1 - x2 * x2)); return 1 / (1 + e); }; @@ -252,7 +252,6 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { double mu0 = 1.0, mu1 = 3.0; double sigma = 2.0; - auto model = noiseModel::Isotropic::Sigma(1, sigma); DiscreteKey m(M(0), 2); Key z = Z(0); @@ -382,7 +381,6 @@ HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, I_1x1, x0, -I_1x1, model1); auto motion = new GaussianMixture({f01}, {x0, x1}, {m1}, {c0, c1}); - hbn.emplace_back(motion); if (add_second_measurement) { @@ -487,7 +485,7 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { VectorValues given; given.insert(z0, Vector1(0.5)); // The motion model measurement says we didn't move - given.insert(f01, Vector1(0.0)); + // given.insert(x1, Vector1(0.0)); { // Start with no measurement on x1, only on x0 @@ -512,7 +510,7 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { // Since we have a measurement on z2, we get a definite result DiscreteConditional expected(m1, "0.4262682/0.5737318"); - + // regression EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); } } From 867f63d600f018324530bb9c48e731a55f445f62 Mon Sep 17 00:00:00 2001 From: Joel Truher Date: Thu, 29 Aug 2024 08:42:27 -0700 Subject: [PATCH 161/398] added SelfCalibrationExample.py --- .../gtsam/examples/SelfCalibrationExample.py | 122 ++++++++++++++++++ 1 file changed, 122 insertions(+) create mode 100644 python/gtsam/examples/SelfCalibrationExample.py diff --git a/python/gtsam/examples/SelfCalibrationExample.py b/python/gtsam/examples/SelfCalibrationExample.py new file mode 100644 index 000000000..a1919e14e --- /dev/null +++ b/python/gtsam/examples/SelfCalibrationExample.py @@ -0,0 +1,122 @@ +# pylint: 
disable=unused-import,consider-using-from-import,invalid-name,no-name-in-module,no-member,missing-function-docstring,too-many-locals +""" +Transcription of SelfCalibrationExample.cpp +""" +import math + +from gtsam import Cal3_S2 +from gtsam.noiseModel import Diagonal, Isotropic + +# SFM-specific factors +from gtsam import GeneralSFMFactor2Cal3_S2 # does calibration ! +from gtsam import PinholeCameraCal3_S2 + +# Camera observations of landmarks (i.e. pixel coordinates) will be stored as Point2 (x, y). +from gtsam import Point2 +from gtsam import Point3, Pose3, Rot3 + +# Inference and optimization +from gtsam import NonlinearFactorGraph, DoglegOptimizer, Values +from gtsam.symbol_shorthand import K, L, X + + +# this is a direct translation of examples/SFMData.h +# which is slightly different from python/gtsam/examples/SFMdata.py. +def createPoints() -> list[Point3]: + """ + Create the set of ground-truth landmarks + """ + return [ + Point3(10.0, 10.0, 10.0), + Point3(-10.0, 10.0, 10.0), + Point3(-10.0, -10.0, 10.0), + Point3(10.0, -10.0, 10.0), + Point3(10.0, 10.0, -10.0), + Point3(-10.0, 10.0, -10.0), + Point3(-10.0, -10.0, -10.0), + Point3(10.0, -10.0, -10.0), + ] + + +def createPoses( + init: Pose3 = Pose3(Rot3.Ypr(math.pi / 2, 0, -math.pi / 2), Point3(30, 0, 0)), + delta: Pose3 = Pose3( + Rot3.Ypr(0, -math.pi / 4, 0), + Point3(math.sin(math.pi / 4) * 30, 0, 30 * (1 - math.sin(math.pi / 4))), + ), + steps: int = 8, +) -> list[Pose3]: + """ + Create the set of ground-truth poses + Default values give a circular trajectory, + radius 30 at pi/4 intervals, always facing the circle center + """ + poses: list[Pose3] = [] + poses.append(init) + for i in range(1, steps): + poses.append(poses[i - 1].compose(delta)) + return poses + + +def main() -> None: + # Create the set of ground-truth + points: list[Point3] = createPoints() + poses: list[Pose3] = createPoses() + + # Create the factor graph + graph = NonlinearFactorGraph() + + # Add a prior on pose x1. 
+ # 30cm std on x,y,z 0.1 rad on roll,pitch,yaw + poseNoise = Diagonal.Sigmas([0.1, 0.1, 0.1, 0.3, 0.3, 0.3]) + graph.addPriorPose3(X(0), poses[0], poseNoise) + + # Simulated measurements from each camera pose, adding them to the factor graph + Kcal = Cal3_S2(50.0, 50.0, 0.0, 50.0, 50.0) + measurementNoise = Isotropic.Sigma(2, 1.0) + for i, pose in enumerate(poses): + for j, point in enumerate(points): + camera = PinholeCameraCal3_S2(pose, Kcal) + measurement: Point2 = camera.project(point) + # The only real difference with the Visual SLAM example is that here we + # use a different factor type, that also calculates the Jacobian with + # respect to calibration + graph.add( + GeneralSFMFactor2Cal3_S2( + measurement, + measurementNoise, + X(i), + L(j), + K(0), + ) + ) + + # Add a prior on the position of the first landmark. + pointNoise = Isotropic.Sigma(3, 0.1) + graph.addPriorPoint3(L(0), points[0], pointNoise) # add directly to graph + + # Add a prior on the calibration. + calNoise = Diagonal.Sigmas([500, 500, 0.1, 100, 100]) + graph.addPriorCal3_S2(K(0), Kcal, calNoise) + + # Create the initial estimate to the solution + # now including an estimate on the camera calibration parameters + initialEstimate = Values() + initialEstimate.insert(K(0), Cal3_S2(60.0, 60.0, 0.0, 45.0, 45.0)) + for i, pose in enumerate(poses): + initialEstimate.insert( + X(i), + pose.compose( + Pose3(Rot3.Rodrigues(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20)) + ), + ) + for j, point in enumerate(points): + initialEstimate.insert(L(j), point + Point3(-0.25, 0.20, 0.15)) + + # Optimize the graph and print results + result: Values = DoglegOptimizer(graph, initialEstimate).optimize() + result.print("Final results:\n") + + +if __name__ == "__main__": + main() From 617a99f6bf1a61cc84e35c8d26770d7dacc65518 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 29 Aug 2024 13:26:16 -0400 Subject: [PATCH 162/398] format HybridConditional.h --- gtsam/hybrid/HybridConditional.h | 4 ++-- 1 file 
changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index 64bdcb2c1..fb6542822 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -61,7 +61,7 @@ class GTSAM_EXPORT HybridConditional public Conditional { public: // typedefs needed to play nice with gtsam - typedef HybridConditional This; ///< Typedef to this class + typedef HybridConditional This; ///< Typedef to this class typedef std::shared_ptr shared_ptr; ///< shared_ptr to this class typedef HybridFactor BaseFactor; ///< Typedef to our factor base class typedef Conditional @@ -185,7 +185,7 @@ class GTSAM_EXPORT HybridConditional * Return the log normalization constant. * Note this is 0.0 for discrete and hybrid conditionals, but depends * on the continuous parameters for Gaussian conditionals. - */ + */ double logNormalizationConstant() const override; /// Return the probability (or density) of the underlying conditional. From b4637049267cab61110251e70ebfbc0fa4363db1 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 29 Aug 2024 13:37:28 -0400 Subject: [PATCH 163/398] improved HybridGaussianFactorGraph::printErrors --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 27 ++++++++++------------ gtsam/hybrid/HybridGaussianFactorGraph.h | 8 +++++++ 2 files changed, 20 insertions(+), 15 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 2c5d239f5..7ac6cef98 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -97,29 +97,27 @@ void HybridGaussianFactorGraph::printErrors( std::cout << "nullptr" << "\n"; } else { - factor->print(ss.str(), keyFormatter); - std::cout << "error = "; - gmf->errorTree(values.continuous()).print("", keyFormatter); - std::cout << std::endl; + gmf->operator()(values.discrete())->print(ss.str(), keyFormatter); + std::cout << "error = " << gmf->error(values) << 
std::endl; } } else if (auto hc = std::dynamic_pointer_cast(factor)) { if (factor == nullptr) { std::cout << "nullptr" << "\n"; } else { - factor->print(ss.str(), keyFormatter); - if (hc->isContinuous()) { + factor->print(ss.str(), keyFormatter); std::cout << "error = " << hc->asGaussian()->error(values) << "\n"; } else if (hc->isDiscrete()) { - std::cout << "error = "; - hc->asDiscrete()->errorTree().print("", keyFormatter); - std::cout << "\n"; + factor->print(ss.str(), keyFormatter); + std::cout << "error = " << hc->asDiscrete()->error(values.discrete()) + << "\n"; } else { // Is hybrid - std::cout << "error = "; - hc->asMixture()->errorTree(values.continuous()).print(); - std::cout << "\n"; + auto mixtureComponent = + hc->asMixture()->operator()(values.discrete()); + mixtureComponent->print(ss.str(), keyFormatter); + std::cout << "error = " << mixtureComponent->error(values) << "\n"; } } } else if (auto gf = std::dynamic_pointer_cast(factor)) { @@ -140,8 +138,7 @@ void HybridGaussianFactorGraph::printErrors( << "\n"; } else { factor->print(ss.str(), keyFormatter); - std::cout << "error = "; - df->errorTree().print("", keyFormatter); + std::cout << "error = " << df->error(values.discrete()) << std::endl; } } else { @@ -550,7 +547,7 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::errorTree( AlgebraicDecisionTree error_tree(0.0); // Iterate over each factor. - for (auto &f : factors_) { + for (auto &factor : factors_) { // TODO(dellaert): just use a virtual method defined in HybridFactor. 
AlgebraicDecisionTree factor_error; diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 1708ff64b..2ca6e4c95 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -144,6 +144,14 @@ class GTSAM_EXPORT HybridGaussianFactorGraph // const std::string& s = "HybridGaussianFactorGraph", // const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override; + /** + * @brief Print the errors of each factor in the hybrid factor graph. + * + * @param values The HybridValues for the variables used to compute the error. + * @param str String that is output before the factor graph and errors. + * @param keyFormatter Formatter function for the keys in the factors. + * @param printCondition A condition to check if a factor should be printed. + */ void printErrors( const HybridValues& values, const std::string& str = "HybridGaussianFactorGraph: ", From 1673c47ea0b232e2c00adf9d60cc9b67c50ead3b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 29 Aug 2024 14:23:18 -0400 Subject: [PATCH 164/398] unpack HybridConditional in errorTree --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 5 ++ .../tests/testHybridGaussianFactorGraph.cpp | 51 +++++++++++++++++++ 2 files changed, 56 insertions(+) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 7ac6cef98..6c3442f97 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -551,6 +551,11 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::errorTree( // TODO(dellaert): just use a virtual method defined in HybridFactor. AlgebraicDecisionTree factor_error; + auto f = factor; + if (auto hc = dynamic_pointer_cast(factor)) { + f = hc->inner(); + } + if (auto gaussianMixture = dynamic_pointer_cast(f)) { // Compute factor error and add it. 
error_tree = error_tree + gaussianMixture->errorTree(continuousValues); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 5be2f2742..68b3b8215 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -598,6 +598,57 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) { EXPECT(assert_equal(expected_probs, probs, 1e-7)); } +/* ****************************************************************************/ +// Test hybrid gaussian factor graph errorTree when there is a HybridConditional in the graph +TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) { + using symbol_shorthand::F; + + DiscreteKey m1(M(1), 2); + Key z0 = Z(0), f01 = F(0); + Key x0 = X(0), x1 = X(1); + + HybridBayesNet hbn; + + auto prior_model = noiseModel::Isotropic::Sigma(1, 1e-1); + auto measurement_model = noiseModel::Isotropic::Sigma(1, 2.0); + + // Set a prior P(x0) at x0=0 + hbn.emplace_back( + new GaussianConditional(x0, Vector1(0.0), I_1x1, prior_model)); + + // Add measurement P(z0 | x0) + hbn.emplace_back(new GaussianConditional(z0, Vector1(0.0), -I_1x1, x0, I_1x1, + measurement_model)); + + // Add hybrid motion model + double mu = 0.0; + double sigma0 = 1e2, sigma1 = 1e-2; + auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); + auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); + auto c0 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, + x0, -I_1x1, model0), + c1 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, + x0, -I_1x1, model1); + hbn.emplace_back(new GaussianMixture({f01}, {x0, x1}, {m1}, {c0, c1})); + + // Discrete uniform prior. 
+ hbn.emplace_back(new DiscreteConditional(m1, "0.5/0.5")); + + VectorValues given; + given.insert(z0, Vector1(0.0)); + given.insert(f01, Vector1(0.0)); + auto gfg = hbn.toFactorGraph(given); + + VectorValues vv; + vv.insert(x0, Vector1(1.0)); + vv.insert(x1, Vector1(2.0)); + AlgebraicDecisionTree errorTree = gfg.errorTree(vv); + + // regression + AlgebraicDecisionTree expected(m1, 59.335390372, 5050.125); + EXPECT(assert_equal(expected, errorTree, 1e-9)); +} + /* ****************************************************************************/ // Check that assembleGraphTree assembles Gaussian factor graphs for each // assignment. From ae423798673e200f47b0edd4d25fa6c5fbddfdff Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 29 Aug 2024 15:16:04 -0700 Subject: [PATCH 165/398] Move to cmake 3.5 --- CMakeLists.txt | 2 +- cmake/example_cmake_find_gtsam/CMakeLists.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 66b1804af..916f9df32 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,4 +1,4 @@ -cmake_minimum_required(VERSION 3.0) +cmake_minimum_required(VERSION 3.5) # Set the version number for the library set (GTSAM_VERSION_MAJOR 4) diff --git a/cmake/example_cmake_find_gtsam/CMakeLists.txt b/cmake/example_cmake_find_gtsam/CMakeLists.txt index d020f7032..05216d3f3 100644 --- a/cmake/example_cmake_find_gtsam/CMakeLists.txt +++ b/cmake/example_cmake_find_gtsam/CMakeLists.txt @@ -1,7 +1,7 @@ # This file shows how to build and link a user project against GTSAM using CMake ################################################################################### # To create your own project, replace "example" with the actual name of your project -cmake_minimum_required(VERSION 3.0) +cmake_minimum_required(VERSION 3.5) project(example CXX) # Find GTSAM, either from a local build, or from a Debian/Ubuntu package. 
From 7b96d1e32525dbb123653058e1aab4e82270a782 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 29 Aug 2024 15:32:26 -0700 Subject: [PATCH 166/398] Set policies --- CMakeLists.txt | 4 ++++ cmake/HandleBoost.cmake | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 916f9df32..9bd72d2e6 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,4 +1,8 @@ cmake_minimum_required(VERSION 3.5) +cmake_policy(SET CMP0082 NEW) # install from sub-directories immediately +cmake_policy(SET CMP0102 NEW) # set policy on advanced variables and cmake cache +cmake_policy(SET CMP0156 NEW) # new linker strategies +cmake_policy(SET CMP0167 OLD) # Don't complain about boost # Set the version number for the library set (GTSAM_VERSION_MAJOR 4) diff --git a/cmake/HandleBoost.cmake b/cmake/HandleBoost.cmake index 6c742cfe5..03251126e 100644 --- a/cmake/HandleBoost.cmake +++ b/cmake/HandleBoost.cmake @@ -25,7 +25,7 @@ endif() set(BOOST_FIND_MINIMUM_VERSION 1.65) set(BOOST_FIND_MINIMUM_COMPONENTS serialization system filesystem thread program_options date_time timer chrono regex) -find_package(Boost ${BOOST_FIND_MINIMUM_VERSION} COMPONENTS ${BOOST_FIND_MINIMUM_COMPONENTS}) +find_package(Boost ${BOOST_FIND_MINIMUM_VERSION} COMPONENTS ${BOOST_FIND_MINIMUM_COMPONENTS} REQUIRED) # Required components if(NOT Boost_SERIALIZATION_LIBRARY OR NOT Boost_SYSTEM_LIBRARY OR NOT Boost_FILESYSTEM_LIBRARY OR From 1236e058a11c168006287f614a57b171cf432165 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 29 Aug 2024 19:05:38 -0400 Subject: [PATCH 167/398] rename sigmoid to prob_m_z --- .../tests/testGaussianMixtureFactor.cpp | 32 ++++++++++--------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 3d26ae573..187d9cf23 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ 
b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -205,17 +205,20 @@ namespace test_gmm { /** * Function to compute P(m=1|z). For P(m=0|z), swap mus and sigmas. + * If sigma0 == sigma1, it simplifies to a sigmoid function. + * * Follows equation 7.108 since it is more generic. */ -double sigmoid(double mu0, double mu1, double sigma0, double sigma1, double z) { +double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, + double z) { double x1 = ((z - mu0) / sigma0), x2 = ((z - mu1) / sigma1); double d = sigma0 / sigma1; double e = d * std::exp(-0.5 * (x1 * x1 - x2 * x2)); return 1 / (1 + e); }; -HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, double sigma0, - double sigma1) { +static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, + double sigma0, double sigma1) { DiscreteKey m(M(0), 2); Key z = Z(0); @@ -269,7 +272,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); EXPECT_DOUBLES_EQUAL( - sigmoid(mu0, mu1, sigma, sigma, midway), + prob_m_z(mu0, mu1, sigma, sigma, midway), bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), 1e-8); @@ -288,7 +291,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); EXPECT_DOUBLES_EQUAL( - sigmoid(mu0, mu1, sigma, sigma, midway - lambda), + prob_m_z(mu0, mu1, sigma, sigma, midway - lambda), bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), 1e-8); } @@ -301,7 +304,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); EXPECT_DOUBLES_EQUAL( - sigmoid(mu0, mu1, sigma, sigma, midway + lambda), + prob_m_z(mu0, mu1, sigma, sigma, midway + lambda), bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), 1e-8); } @@ -349,11 +352,11 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) { namespace test_two_state_estimation { /// Create Two State Bayes Network with 
measurements -HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, - double sigma1, - bool add_second_measurement = false, - double prior_sigma = 1e-3, - double measurement_sigma = 3.0) { +static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, + double sigma1, + bool add_second_measurement = false, + double prior_sigma = 1e-3, + double measurement_sigma = 3.0) { DiscreteKey m1(M(1), 2); Key z0 = Z(0), z1 = Z(1), f01 = F(0); Key x0 = X(0), x1 = X(1); @@ -401,7 +404,7 @@ HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, /* ************************************************************************* */ /** - * Test a model P(x0)P(z0|x0)P(f01|x1,x0,m1)P(z1|x1)P(m1). + * Test a model P(x0)P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). * * P(f01|x1,x0,m1) has different means and same covariance. * @@ -435,7 +438,6 @@ TEST(GaussianMixtureFactor, TwoStateModel) { // Since no measurement on x1, we hedge our bets DiscreteConditional expected(m1, "0.5/0.5"); - // regression EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()))); } @@ -457,7 +459,7 @@ TEST(GaussianMixtureFactor, TwoStateModel) { /* ************************************************************************* */ /** - * Test a model P(x0)P(z0|x0)P(f01|x1,x0,m1)P(z1|x1)P(m1). + * Test a model P(x0)P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). * * P(f01|x1,x0,m1) has different means and different covariances. 
* @@ -520,4 +522,4 @@ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } -/* ************************************************************************* */ \ No newline at end of file +/* ************************************************************************* */ From 88605c851baeae92f507f0fbb228bc0fa063f784 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 29 Aug 2024 19:32:00 -0400 Subject: [PATCH 168/398] make GaussianMixture2 test as good as GaussianMixture test --- .../tests/testGaussianMixtureFactor.cpp | 65 +++++++++++++++---- 1 file changed, 51 insertions(+), 14 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 187d9cf23..8d2fba821 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -333,20 +333,56 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) { auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma0, sigma1); - // The result should be a bell curve like function - // with m1 > m0 close to 3.1333. - // We get 3.1333 by finding the maximum value of the function. - VectorValues given; - given.insert(z, Vector1(3.133)); - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + double m1_high = 3.133, lambda = 4; + { + // The result should be a bell curve like function + // with m1 > m0 close to 3.1333. + // We get 3.1333 by finding the maximum value of the function. 
+ VectorValues given; + given.insert(z, Vector1(3.133)); - // regression - HybridBayesNet expected; - expected.emplace_back( - new DiscreteConditional(m, "0.325603277954/0.674396722046")); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - EXPECT(assert_equal(expected, *bn)); + EXPECT_DOUBLES_EQUAL( + prob_m_z(mu0, mu1, sigma0, sigma1, m1_high), + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8); + + // At the halfway point between the means + HybridBayesNet expected; + expected.emplace_back(new DiscreteConditional( + m, {}, + vector{prob_m_z(mu1, mu0, sigma1, sigma0, m1_high), + prob_m_z(mu0, mu1, sigma0, sigma1, m1_high)})); + + EXPECT(assert_equal(expected, *bn)); + } + { + // Shift by -lambda + VectorValues given; + given.insert(z, Vector1(m1_high - lambda)); + + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + EXPECT_DOUBLES_EQUAL( + prob_m_z(mu0, mu1, sigma0, sigma1, m1_high - lambda), + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), + 1e-8); + } + { + // Shift by lambda + VectorValues given; + given.insert(z, Vector1(m1_high + lambda)); + + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + EXPECT_DOUBLES_EQUAL( + prob_m_z(mu0, mu1, sigma0, sigma1, m1_high + lambda), + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), + 1e-8); + } } namespace test_two_state_estimation { @@ -470,7 +506,8 @@ TEST(GaussianMixtureFactor, TwoStateModel) { * the P(m1) should be 0.5/0.5. * Getting a measurement on z1 gives use more information. 
*/ -TEST(GaussianMixtureFactor, TwoStateModel2) { +// TODO(Varun) Figuring out the correctness of this +TEST_DISABLED(GaussianMixtureFactor, TwoStateModel2) { using namespace test_two_state_estimation; double mu0 = 1.0, mu1 = 3.0; @@ -479,7 +516,7 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); DiscreteKey m1(M(1), 2); - Key z0 = Z(0), z1 = Z(1), f01 = F(0); + Key z0 = Z(0), z1 = Z(1); // Start with no measurement on x1, only on x0 HybridBayesNet hbn = CreateBayesNet(mu0, mu1, sigma0, sigma1, false); From ca073fd9ef32718161a53c903f8d258438009ed8 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 29 Aug 2024 19:32:19 -0400 Subject: [PATCH 169/398] fix bug in prob_m_z --- gtsam/hybrid/tests/testGaussianMixtureFactor.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 8d2fba821..c473e567c 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -212,7 +212,7 @@ namespace test_gmm { double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, double z) { double x1 = ((z - mu0) / sigma0), x2 = ((z - mu1) / sigma1); - double d = sigma0 / sigma1; + double d = sigma1 / sigma0; double e = d * std::exp(-0.5 * (x1 * x1 - x2 * x2)); return 1 / (1 + e); }; From b18dc3c812f167dc97ecd118397678344b9d53f7 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 29 Aug 2024 19:38:00 -0400 Subject: [PATCH 170/398] fix motion model for TwoStateModel test --- .../hybrid/tests/testGaussianMixtureFactor.cpp | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index c473e567c..c8ffdc63b 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ 
-394,7 +394,7 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, double prior_sigma = 1e-3, double measurement_sigma = 3.0) { DiscreteKey m1(M(1), 2); - Key z0 = Z(0), z1 = Z(1), f01 = F(0); + Key z0 = Z(0), z1 = Z(1); Key x0 = X(0), x1 = X(1); HybridBayesNet hbn; @@ -414,12 +414,12 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, // Add hybrid motion model auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); - auto c0 = make_shared(f01, Vector1(mu0), I_1x1, x1, - I_1x1, x0, -I_1x1, model0), - c1 = make_shared(f01, Vector1(mu1), I_1x1, x1, - I_1x1, x0, -I_1x1, model1); + auto c0 = make_shared(x1, Vector1(mu0), I_1x1, x0, + -I_1x1, model0), + c1 = make_shared(x1, Vector1(mu1), I_1x1, x0, + -I_1x1, model1); - auto motion = new GaussianMixture({f01}, {x0, x1}, {m1}, {c0, c1}); + auto motion = new GaussianMixture({x1}, {x0}, {m1}, {c0, c1}); hbn.emplace_back(motion); if (add_second_measurement) { @@ -458,15 +458,13 @@ TEST(GaussianMixtureFactor, TwoStateModel) { double sigma = 2.0; DiscreteKey m1(M(1), 2); - Key z0 = Z(0), z1 = Z(1), f01 = F(0); + Key z0 = Z(0), z1 = Z(1); // Start with no measurement on x1, only on x0 HybridBayesNet hbn = CreateBayesNet(mu0, mu1, sigma, sigma, false); VectorValues given; given.insert(z0, Vector1(0.5)); - // The motion model measurement says we didn't move - given.insert(f01, Vector1(0.0)); { HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); @@ -523,8 +521,6 @@ TEST_DISABLED(GaussianMixtureFactor, TwoStateModel2) { VectorValues given; given.insert(z0, Vector1(0.5)); - // The motion model measurement says we didn't move - // given.insert(x1, Vector1(0.0)); { // Start with no measurement on x1, only on x0 From e698073d6fb775089c7823455fb61fd58b6b09e6 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Fri, 30 Aug 2024 10:39:52 -0700 Subject: [PATCH 171/398] Check if policy exists --- CMakeLists.txt | 10 +++++++++- 1 
file changed, 9 insertions(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 9bd72d2e6..3306e3470 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,8 +1,16 @@ cmake_minimum_required(VERSION 3.5) -cmake_policy(SET CMP0082 NEW) # install from sub-directories immediately +if (POLICY CMP0082) + cmake_policy(SET CMP0082 NEW) # install from sub-directories immediately +endif() +if (POLICY CMP0102) cmake_policy(SET CMP0102 NEW) # set policy on advanced variables and cmake cache +endif() +if (POLICY CMP0156) cmake_policy(SET CMP0156 NEW) # new linker strategies +endif() +if (POLICY CMP0167) cmake_policy(SET CMP0167 OLD) # Don't complain about boost +endif() # Set the version number for the library set (GTSAM_VERSION_MAJOR 4) From d34dd659bc65165fe862b3012f126305134164b9 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Sep 2024 15:10:52 -0400 Subject: [PATCH 172/398] remove overwhelming prior and adjust measurements accordingly --- .../hybrid/tests/testGaussianMixtureFactor.cpp | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index c8ffdc63b..cfcf33e4d 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -399,13 +399,7 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, HybridBayesNet hbn; - auto prior_model = noiseModel::Isotropic::Sigma(1, prior_sigma); auto measurement_model = noiseModel::Isotropic::Sigma(1, measurement_sigma); - - // Set a prior P(x0) at x0=0 - hbn.emplace_back( - new GaussianConditional(x0, Vector1(0.0), I_1x1, prior_model)); - // Add measurement P(z0 | x0) auto p_z0 = new GaussianConditional(z0, Vector1(0.0), -I_1x1, x0, I_1x1, measurement_model); @@ -479,13 +473,14 @@ TEST(GaussianMixtureFactor, TwoStateModel) { // Now we add a measurement z1 on x1 hbn = CreateBayesNet(mu0, mu1, sigma, 
sigma, true); - // If we see z1=2.2, discrete mode should say m1=1 - given.insert(z1, Vector1(2.2)); + // If we see z1=2.6 (> 2.5 which is the halfway point), + // discrete mode should say m1=1 + given.insert(z1, Vector1(2.6)); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); // Since we have a measurement on z2, we get a definite result - DiscreteConditional expected(m1, "0.4923083/0.5076917"); + DiscreteConditional expected(m1, "0.49772729/0.50227271"); // regression EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); } @@ -504,8 +499,7 @@ TEST(GaussianMixtureFactor, TwoStateModel) { * the P(m1) should be 0.5/0.5. * Getting a measurement on z1 gives use more information. */ -// TODO(Varun) Figuring out the correctness of this -TEST_DISABLED(GaussianMixtureFactor, TwoStateModel2) { +TEST(GaussianMixtureFactor, TwoStateModel2) { using namespace test_two_state_estimation; double mu0 = 1.0, mu1 = 3.0; @@ -544,7 +538,7 @@ TEST_DISABLED(GaussianMixtureFactor, TwoStateModel2) { HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); // Since we have a measurement on z2, we get a definite result - DiscreteConditional expected(m1, "0.4262682/0.5737318"); + DiscreteConditional expected(m1, "0.44744586/0.55255414"); // regression EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); } From 82fc9b9eeb94427a093a2a86a76923ff5e798de1 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Sep 2024 15:17:18 -0400 Subject: [PATCH 173/398] account for extra error when sigmas are different --- gtsam/hybrid/GaussianMixture.cpp | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 0a0332af8..228e851bb 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -87,7 +87,18 @@ GaussianFactorGraphTree GaussianMixture::add( /* 
*******************************************************************************/ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { - auto wrap = [](const GaussianConditional::shared_ptr &gc) { + auto wrap = [this](const GaussianConditional::shared_ptr &gc) { + const double Cgm_Kgcm = this->logConstant_ - gc->logNormalizationConstant(); + // If there is a difference in the covariances, we need to account for that + // since the error is dependent on the mode. + if (Cgm_Kgcm > 0.0) { + // We add a constant factor which will be used when computing + // the probability of the discrete variables. + Vector c(1); + c << std::sqrt(2.0 * Cgm_Kgcm); + auto constantFactor = std::make_shared(c); + return GaussianFactorGraph{gc, constantFactor}; + } return GaussianFactorGraph{gc}; }; return {conditionals_, wrap}; From 06ecf00dba3fa77124bce8eb0b47dc710d4c2328 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Sep 2024 15:27:33 -0400 Subject: [PATCH 174/398] improve docs when computing discreteSeparator --- gtsam/hybrid/HybridFactorGraph.h | 2 +- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/HybridFactorGraph.h b/gtsam/hybrid/HybridFactorGraph.h index 8b59fd4f9..1830d44ab 100644 --- a/gtsam/hybrid/HybridFactorGraph.h +++ b/gtsam/hybrid/HybridFactorGraph.h @@ -66,7 +66,7 @@ class GTSAM_EXPORT HybridFactorGraph : public FactorGraph { /// Get all the discrete keys in the factor graph. std::set discreteKeys() const; - /// Get all the discrete keys in the factor graph, as a set. + /// Get all the discrete keys in the factor graph, as a set of Keys. KeySet discreteKeySet() const; /// Get a map from Key to corresponding DiscreteKey. 
diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 6c3442f97..c44fc4f1d 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -330,7 +330,7 @@ static std::shared_ptr createDiscreteFactor( // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); // We take negative of the logNormalizationConstant `log(1/k)` // to get `log(k)`. - return -factor->error(kEmpty) + (-conditional->logNormalizationConstant()); + return -factor->error(kEmpty) - conditional->logNormalizationConstant(); }; AlgebraicDecisionTree logProbabilities( @@ -523,14 +523,15 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, std::inserter(continuousSeparator, continuousSeparator.begin())); // Similarly for the discrete separator. - KeySet discreteSeparatorSet; - std::set discreteSeparator; auto discreteKeySet = factors.discreteKeySet(); + // Use set-difference to get the difference in keys + KeySet discreteSeparatorSet; std::set_difference( discreteKeySet.begin(), discreteKeySet.end(), frontalKeysSet.begin(), frontalKeysSet.end(), std::inserter(discreteSeparatorSet, discreteSeparatorSet.begin())); // Convert from set of keys to set of DiscreteKeys + std::set discreteSeparator; auto discreteKeyMap = factors.discreteKeyMap(); for (auto key : discreteSeparatorSet) { discreteSeparator.insert(discreteKeyMap.at(key)); From 3f782a4ae7cf99bdc3e40205d665721651fa5ee4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Sep 2024 17:36:45 -0400 Subject: [PATCH 175/398] check for valid GaussianConditional --- gtsam/hybrid/GaussianMixture.cpp | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 228e851bb..fa4248921 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -88,16 +88,20 @@ GaussianFactorGraphTree GaussianMixture::add( /* 
*******************************************************************************/ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { auto wrap = [this](const GaussianConditional::shared_ptr &gc) { - const double Cgm_Kgcm = this->logConstant_ - gc->logNormalizationConstant(); - // If there is a difference in the covariances, we need to account for that - // since the error is dependent on the mode. - if (Cgm_Kgcm > 0.0) { - // We add a constant factor which will be used when computing - // the probability of the discrete variables. - Vector c(1); - c << std::sqrt(2.0 * Cgm_Kgcm); - auto constantFactor = std::make_shared(c); - return GaussianFactorGraph{gc, constantFactor}; + // First check if conditional has not been pruned + if (gc) { + const double Cgm_Kgcm = + this->logConstant_ - gc->logNormalizationConstant(); + // If there is a difference in the covariances, we need to account for + // that since the error is dependent on the mode. + if (Cgm_Kgcm > 0.0) { + // We add a constant factor which will be used when computing + // the probability of the discrete variables. 
+ Vector c(1); + c << std::sqrt(2.0 * Cgm_Kgcm); + auto constantFactor = std::make_shared(c); + return GaussianFactorGraph{gc, constantFactor}; + } } return GaussianFactorGraph{gc}; }; From 997d0b411b2f7a03124cd28a75e12c6729b9a7ac Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Sep 2024 17:39:52 -0400 Subject: [PATCH 176/398] ratio tests for GaussianMixtureFactor --- .../tests/testGaussianMixtureFactor.cpp | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index cfcf33e4d..139b50e2a 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -519,6 +519,24 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { { // Start with no measurement on x1, only on x0 HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + + { + VectorValues vv{ + {X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}, {Z(0), Vector1(0.5)}}; + HybridValues hv0(vv, DiscreteValues{{M(1), 0}}), + hv1(vv, DiscreteValues{{M(1), 1}}); + EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), + gfg.error(hv1) / hbn.error(hv1), 1e-9); + } + { + VectorValues vv{ + {X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}, {Z(0), Vector1(0.5)}}; + HybridValues hv0(vv, DiscreteValues{{M(1), 0}}), + hv1(vv, DiscreteValues{{M(1), 1}}); + EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), + gfg.error(hv1) / hbn.error(hv1), 1e-9); + } + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); // Since no measurement on x1, we a 50/50 probability @@ -535,6 +553,28 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { given.insert(z1, Vector1(2.2)); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + + { + VectorValues vv{{X(0), Vector1(0.0)}, + {X(1), Vector1(1.0)}, + {Z(0), Vector1(0.5)}, + {Z(1), Vector1(2.2)}}; + HybridValues hv0(vv, DiscreteValues{{M(1), 0}}), + hv1(vv, DiscreteValues{{M(1), 1}}); + EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / 
hbn.error(hv0), + gfg.error(hv1) / hbn.error(hv1), 1e-9); + } + { + VectorValues vv{{X(0), Vector1(0.5)}, + {X(1), Vector1(3.0)}, + {Z(0), Vector1(0.5)}, + {Z(1), Vector1(2.2)}}; + HybridValues hv0(vv, DiscreteValues{{M(1), 0}}), + hv1(vv, DiscreteValues{{M(1), 1}}); + EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), + gfg.error(hv1) / hbn.error(hv1), 1e-9); + } + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); // Since we have a measurement on z2, we get a definite result From cc0400371652f00dc066c4cc854e03d35b1959d3 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 4 Sep 2024 15:18:27 -0400 Subject: [PATCH 177/398] conditionalError method --- gtsam/hybrid/GaussianMixture.cpp | 53 ++++++++++++-------------------- gtsam/hybrid/GaussianMixture.h | 4 +++ 2 files changed, 23 insertions(+), 34 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index fa4248921..325c32f95 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -330,19 +330,28 @@ AlgebraicDecisionTree GaussianMixture::logProbability( return DecisionTree(conditionals_, probFunc); } +/* ************************************************************************* */ +double GaussianMixture::conditionalError( + const GaussianConditional::shared_ptr &conditional, + const VectorValues &continuousValues) const { + // Check if valid pointer + if (conditional) { + return conditional->error(continuousValues) + // + logConstant_ - conditional->logNormalizationConstant(); + } else { + // If not valid, pointer, it means this conditional was pruned, + // so we return maximum error. + // This way the negative exponential will give + // a probability value close to 0.0. 
+ return std::numeric_limits::max(); + } +} + /* *******************************************************************************/ AlgebraicDecisionTree GaussianMixture::errorTree( const VectorValues &continuousValues) const { auto errorFunc = [&](const GaussianConditional::shared_ptr &conditional) { - // Check if valid pointer - if (conditional) { - return conditional->error(continuousValues) + // - logConstant_ - conditional->logNormalizationConstant(); - } else { - // If not valid, pointer, it means this conditional was pruned, - // so we return maximum error. - return std::numeric_limits::max(); - } + return conditionalError(conditional, continuousValues); }; DecisionTree error_tree(conditionals_, errorFunc); return error_tree; @@ -350,33 +359,9 @@ AlgebraicDecisionTree GaussianMixture::errorTree( /* *******************************************************************************/ double GaussianMixture::error(const HybridValues &values) const { - // Check if discrete keys in discrete assignment are - // present in the GaussianMixture - KeyVector dKeys = this->discreteKeys_.indices(); - bool valid_assignment = false; - for (auto &&kv : values.discrete()) { - if (std::find(dKeys.begin(), dKeys.end(), kv.first) != dKeys.end()) { - valid_assignment = true; - break; - } - } - - // The discrete assignment is not valid so we throw an error. - if (!valid_assignment) { - throw std::runtime_error( - "Invalid discrete values in values. Not all discrete keys specified."); - } - // Directly index to get the conditional, no need to build the whole tree. auto conditional = conditionals_(values.discrete()); - if (conditional) { - return conditional->error(values.continuous()) + // - logConstant_ - conditional->logNormalizationConstant(); - } else { - // If not valid, pointer, it means this conditional was pruned, - // so we return maximum error. 
- return std::numeric_limits::max(); - } + return conditionalError(conditional, values.continuous()); } /* *******************************************************************************/ diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index 41692bb1c..714c00272 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -256,6 +256,10 @@ class GTSAM_EXPORT GaussianMixture /// Check whether `given` has values for all frontal keys. bool allFrontalsGiven(const VectorValues &given) const; + /// Helper method to compute the error of a conditional. + double conditionalError(const GaussianConditional::shared_ptr &conditional, + const VectorValues &continuousValues) const; + #ifdef GTSAM_ENABLE_BOOST_SERIALIZATION /** Serialization function */ friend class boost::serialization::access; From 8f65f058e04a5b33fe1650cd2ca3b82106d57357 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 4 Sep 2024 15:18:50 -0400 Subject: [PATCH 178/398] clean up code --- gtsam/hybrid/HybridFactorGraph.cpp | 9 --------- gtsam/hybrid/HybridFactorGraph.h | 5 +---- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 20 +++++-------------- .../tests/testGaussianMixtureFactor.cpp | 8 ++++---- 4 files changed, 10 insertions(+), 32 deletions(-) diff --git a/gtsam/hybrid/HybridFactorGraph.cpp b/gtsam/hybrid/HybridFactorGraph.cpp index f7b96f694..f5a7bcdfe 100644 --- a/gtsam/hybrid/HybridFactorGraph.cpp +++ b/gtsam/hybrid/HybridFactorGraph.cpp @@ -49,15 +49,6 @@ KeySet HybridFactorGraph::discreteKeySet() const { return keys; } -/* ************************************************************************* */ -std::unordered_map HybridFactorGraph::discreteKeyMap() const { - std::unordered_map result; - for (const DiscreteKey& k : discreteKeys()) { - result[k.first] = k; - } - return result; -} - /* ************************************************************************* */ const KeySet HybridFactorGraph::continuousKeySet() const { KeySet keys; diff --git 
a/gtsam/hybrid/HybridFactorGraph.h b/gtsam/hybrid/HybridFactorGraph.h index 1830d44ab..79f2a7af1 100644 --- a/gtsam/hybrid/HybridFactorGraph.h +++ b/gtsam/hybrid/HybridFactorGraph.h @@ -38,7 +38,7 @@ using SharedFactor = std::shared_ptr; class GTSAM_EXPORT HybridFactorGraph : public FactorGraph { public: using Base = FactorGraph; - using This = HybridFactorGraph; ///< this class + using This = HybridFactorGraph; ///< this class using shared_ptr = std::shared_ptr; ///< shared_ptr to This using Values = gtsam::Values; ///< backwards compatibility @@ -69,9 +69,6 @@ class GTSAM_EXPORT HybridFactorGraph : public FactorGraph { /// Get all the discrete keys in the factor graph, as a set of Keys. KeySet discreteKeySet() const; - /// Get a map from Key to corresponding DiscreteKey. - std::unordered_map discreteKeyMap() const; - /// Get all the continuous keys in the factor graph. const KeySet continuousKeySet() const; diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index c44fc4f1d..a7a6eee5a 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -312,8 +312,8 @@ using Result = std::pair, GaussianMixtureFactor::sharedFactor>; /** - * Compute the probability q(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m) - * from the residual error at the mean μ. + * Compute the probability p(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m) + * from the residual error ||b||^2 at the mean μ. * The residual error contains no keys, and only * depends on the discrete separator if present. */ @@ -523,19 +523,9 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, std::inserter(continuousSeparator, continuousSeparator.begin())); // Similarly for the discrete separator. 
- auto discreteKeySet = factors.discreteKeySet(); - // Use set-difference to get the difference in keys - KeySet discreteSeparatorSet; - std::set_difference( - discreteKeySet.begin(), discreteKeySet.end(), frontalKeysSet.begin(), - frontalKeysSet.end(), - std::inserter(discreteSeparatorSet, discreteSeparatorSet.begin())); - // Convert from set of keys to set of DiscreteKeys - std::set discreteSeparator; - auto discreteKeyMap = factors.discreteKeyMap(); - for (auto key : discreteSeparatorSet) { - discreteSeparator.insert(discreteKeyMap.at(key)); - } + // Since we eliminate all continuous variables first, + // the discrete separator will be *all* the discrete keys. + std::set discreteSeparator = factors.discreteKeys(); return hybridElimination(factors, frontalKeys, continuousSeparator, discreteSeparator); diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 139b50e2a..b2a4981f3 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -434,12 +434,12 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, /* ************************************************************************* */ /** - * Test a model P(x0)P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). + * Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). * * P(f01|x1,x0,m1) has different means and same covariance. * * Converting to a factor graph gives us - * P(x0)ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) + * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) * * If we only have a measurement on z0, then * the probability of m1 should be 0.5/0.5. @@ -488,12 +488,12 @@ TEST(GaussianMixtureFactor, TwoStateModel) { /* ************************************************************************* */ /** - * Test a model P(x0)P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). + * Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). * * P(f01|x1,x0,m1) has different means and different covariances. 
* * Converting to a factor graph gives us - * P(x0)ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) + * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) * * If we only have a measurement on z0, then * the P(m1) should be 0.5/0.5. From 8b04d9b2f510bc310b24c212149fd08c468c6fd8 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 4 Sep 2024 19:27:57 -0400 Subject: [PATCH 179/398] remove tests, will add later --- gtsam/hybrid/tests/testMixtureFactor.cpp | 150 ----------------------- 1 file changed, 150 deletions(-) diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index 48b193eeb..a58a4767f 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -118,156 +118,6 @@ TEST(MixtureFactor, Dim) { EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); } -/* ************************************************************************* */ -// Test components with differing means -TEST(MixtureFactor, DifferentMeans) { - DiscreteKey m1(M(1), 2), m2(M(2), 2); - - Values values; - double x1 = 0.0, x2 = 1.75, x3 = 2.60; - values.insert(X(1), x1); - values.insert(X(2), x2); - values.insert(X(3), x3); - - auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0); - - auto f0 = std::make_shared>(X(1), X(2), 0.0, model0); - auto f1 = std::make_shared>(X(1), X(2), 2.0, model1); - std::vector factors{f0, f1}; - - MixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); - HybridNonlinearFactorGraph hnfg; - hnfg.push_back(mixtureFactor); - - f0 = std::make_shared>(X(2), X(3), 0.0, model0); - f1 = std::make_shared>(X(2), X(3), 2.0, model1); - std::vector factors23{f0, f1}; - hnfg.push_back(MixtureFactor({X(2), X(3)}, {m2}, factors23)); - - auto prior = PriorFactor(X(1), x1, prior_noise); - hnfg.push_back(prior); - - hnfg.emplace_shared>(X(2), 2.0, prior_noise); - - auto hgfg = hnfg.linearize(values); - auto bn = hgfg->eliminateSequential(); - 
HybridValues actual = bn->optimize(); - - HybridValues expected( - VectorValues{ - {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}}, - DiscreteValues{{M(1), 1}, {M(2), 0}}); - - EXPECT(assert_equal(expected, actual)); - - { - DiscreteValues dv{{M(1), 0}, {M(2), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 0}, {M(2), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}, {M(2), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}, {M(2), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); - } -} - -/* ************************************************************************* */ -// Test components with differing covariances -TEST(MixtureFactor, DifferentCovariances) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 1.0, x2 = 1.0; - values.insert(X(1), x1); - values.insert(X(2), x2); - - double between = 0.0; - - auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - - auto f0 = - std::make_shared>(X(1), X(2), between, model0); - auto f1 = - std::make_shared>(X(1), X(2), between, model1); - std::vector factors{f0, f1}; - - // Create via toFactorGraph - using symbol_shorthand::Z; - Matrix H0_1, H0_2, H1_1, H1_2; - Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); - std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H0_1 /*Sp1*/}, - {X(2), H0_2 
/*Tp2*/}}; - - Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); - std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H1_1 /*Sp1*/}, - {X(2), H1_2 /*Tp2*/}}; - auto gm = new gtsam::GaussianMixture( - {Z(1)}, {X(1), X(2)}, {m1}, - {std::make_shared(terms0, 1, -d0, model0), - std::make_shared(terms1, 1, -d1, model1)}); - gtsam::HybridBayesNet bn; - bn.emplace_back(gm); - - gtsam::VectorValues measurements; - measurements.insert(Z(1), gtsam::Z_1x1); - // Create FG with single GaussianMixtureFactor - HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); - - // Linearized prior factor on X1 - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - mixture_fg.push_back(prior); - - auto hbn = mixture_fg.eliminateSequential(); - - VectorValues cv; - cv.insert(X(1), Vector1(0.0)); - cv.insert(X(2), Vector1(0.0)); - // P(m1) = [0.5, 0.5], so we should pick 0 - DiscreteValues dv; - dv.insert({M(1), 0}); - HybridValues expected_values(cv, dv); - - HybridValues actual_values = hbn->optimize(); - EXPECT(assert_equal(expected_values, actual_values)); - - // Check that we get different error values at the MLE point μ. 
- AlgebraicDecisionTree errorTree = hbn->errorTree(cv); - - HybridValues hv0(cv, DiscreteValues{{M(1), 0}}); - HybridValues hv1(cv, DiscreteValues{{M(1), 1}}); - - AlgebraicDecisionTree expectedErrorTree(m1, 9.90348755254, - 0.69314718056); - EXPECT(assert_equal(expectedErrorTree, errorTree)); -} - /* ************************************************************************* */ int main() { TestResult tr; From 13193a1dcdea2aa5e1c6b64bc47c91a78d6d7138 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 5 Sep 2024 10:02:12 -0400 Subject: [PATCH 180/398] better comments --- .../hybrid/tests/testGaussianMixtureFactor.cpp | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 310f3b1e4..b4a643569 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -586,21 +586,21 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { /** * @brief Helper function to specify a Hybrid Bayes Net - * {P(X1) P(Z1 | X1, X2, M1)} and convert it to a Hybrid Factor Graph - * {P(X1)L(X1, X2, M1; Z1)} by converting to likelihoods given Z1. + * P(X1)P(Z1 | X1, X2, M1) and convert it to a Hybrid Factor Graph + * ϕ(X1)ϕ(X1, X2, M1; Z1) by converting to likelihoods given Z1. * * We can specify either different means or different sigmas, * or both for each hybrid factor component. * * @param values Initial values for linearization. - * @param mus The mean values for the conditional components. + * @param means The mean values for the conditional components. * @param sigmas Noise model sigma values (standard deviation). * @param m1 The discrete mode key. * @param z1 The measurement value. 
* @return HybridGaussianFactorGraph */ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( - const gtsam::Values &values, const std::vector &mus, + const gtsam::Values &values, const std::vector &means, const std::vector &sigmas, DiscreteKey &m1, double z1 = 0.0) { // Noise models auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); @@ -608,9 +608,11 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); // GaussianMixtureFactor component factors - auto f0 = std::make_shared>(X(0), X(1), mus[0], model0); - auto f1 = std::make_shared>(X(0), X(1), mus[1], model1); - // std::vector factors{f0, f1}; + auto f0 = + std::make_shared>(X(0), X(1), means[0], model0); + auto f1 = + std::make_shared>(X(0), X(1), means[1], model1); + std::vector factors{f0, f1}; /// Get terms for each p^m(z1 | x1, x2) Matrix H0_1, H0_2, H1_1, H1_2; @@ -651,7 +653,7 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( /** * @brief Test components with differing means. * - * We specify a hybrid Bayes network P(Z | X, M) =p(X1)p(Z1 | X1, X2, M1), + * We specify a hybrid Bayes network P(Z | X, M) =P(X1)P(Z1 | X1, X2, M1), * which is then converted to a factor graph by specifying Z1. * This is a different case since now we have a hybrid factor * with 2 continuous variables ϕ(x1, x2, m1). 
From 3dbb16a114c56be87cf06c3a6c6603a9abce357e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 5 Sep 2024 15:51:37 -0400 Subject: [PATCH 181/398] remove const ref push_back --- gtsam/hybrid/HybridBayesNet.h | 20 +------------------ .../tests/testGaussianMixtureFactor.cpp | 11 +++++++--- 2 files changed, 9 insertions(+), 22 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 891be75da..f745b94c9 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -111,28 +111,10 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { * @return IsSharedPtr */ template - IsSharedPtr push_back(const CONDITIONAL &conditional) { + void push_back(std::shared_ptr conditional) { factors_.push_back(std::make_shared(conditional)); } - /** - * @brief Add a conditional to the Bayes net. - * Implicitly convert to a HybridConditional. - * - * E.g. - * hbn.push_back(DiscreteConditional(m, "1/1")); - * hbn.push_back(GaussianConditional(X(0), Vector1(0.0), I_1x1)); - * - * @tparam CONDITIONAL Type of conditional. This is const ref version. - * @param conditional The conditional as a const reference. - * @return IsSharedPtr - */ - template - IsNotSharedPtr push_back(const CONDITIONAL &conditional) { - auto cond_shared_ptr = std::make_shared(conditional); - push_back(cond_shared_ptr); - } - /** * Preferred: add a conditional directly using a pointer. 
* diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 99814229e..20fb3266e 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -228,12 +228,17 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, auto c0 = make_shared(z, Vector1(mu0), I_1x1, model0), c1 = make_shared(z, Vector1(mu1), I_1x1, model1); - GaussianMixture gm({z}, {}, {m}, {c0, c1}); - DiscreteConditional mixing(m, "0.5/0.5"); + KeyVector frontalKeys{z}, continuousParents; + DiscreteKeys discreteParents{m}; + std::vector conditionals = {c0, c1}; + auto gm = make_shared(frontalKeys, continuousParents, + discreteParents, conditionals); HybridBayesNet hbn; hbn.push_back(gm); - hbn.push_back(mixing); + + auto mixing = make_shared(m, "0.5/0.5"); + hbn.emplace_shared(mixing); return hbn; } From df12392fb3b87a81b7538bedcb43373bf70741ca Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 5 Sep 2024 18:41:58 -0400 Subject: [PATCH 182/398] remove unused templates --- gtsam/hybrid/HybridBayesNet.h | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index f745b94c9..9876e6fc1 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -33,18 +33,6 @@ namespace gtsam { * @ingroup hybrid */ class GTSAM_EXPORT HybridBayesNet : public BayesNet { - template - struct is_shared_ptr : std::false_type {}; - template - struct is_shared_ptr> : std::true_type {}; - - /// Helper templates for checking if a type is a shared pointer or not - template - using IsSharedPtr = typename std::enable_if::value>::type; - template - using IsNotSharedPtr = - typename std::enable_if::value>::type; - public: using Base = BayesNet; using This = HybridBayesNet; @@ -108,7 +96,6 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { * * @tparam CONDITIONAL Type of conditional. 
This is shared_ptr version. * @param conditional The conditional as a shared pointer. - * @return IsSharedPtr */ template void push_back(std::shared_ptr conditional) { From 0823fdc6e8c97341d43714f13cf5002512693d29 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 5 Sep 2024 19:03:54 -0400 Subject: [PATCH 183/398] simplify test --- gtsam/hybrid/tests/testGaussianMixtureFactor.cpp | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 20fb3266e..5ec600aa8 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -227,18 +227,13 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, auto c0 = make_shared(z, Vector1(mu0), I_1x1, model0), c1 = make_shared(z, Vector1(mu1), I_1x1, model1); - - KeyVector frontalKeys{z}, continuousParents; - DiscreteKeys discreteParents{m}; - std::vector conditionals = {c0, c1}; - auto gm = make_shared(frontalKeys, continuousParents, - discreteParents, conditionals); - - HybridBayesNet hbn; - hbn.push_back(gm); + auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1}); auto mixing = make_shared(m, "0.5/0.5"); - hbn.emplace_shared(mixing); + + HybridBayesNet hbn; + hbn.emplace_back(gm); + hbn.push_back(mixing); return hbn; } From 51a2fd5334500a09fa744c1535278a13675cf4da Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 5 Sep 2024 20:03:55 -0400 Subject: [PATCH 184/398] improved comments --- .../tests/testGaussianMixtureFactor.cpp | 64 +++++++++++++------ 1 file changed, 43 insertions(+), 21 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index b4a643569..fc832d704 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -388,10 +388,10 @@ TEST(GaussianMixtureFactor, 
GaussianMixtureModel2) { namespace test_two_state_estimation { /// Create Two State Bayes Network with measurements +/// The Bayes network is P(z0|x0)P(x1|x0,m1)p(m1) and optionally p(z1|x1) static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, double sigma1, bool add_second_measurement = false, - double prior_sigma = 1e-3, double measurement_sigma = 3.0) { DiscreteKey m1(M(1), 2); Key z0 = Z(0), z1 = Z(1); @@ -436,7 +436,7 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, /** * Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). * - * P(f01|x1,x0,m1) has different means and same covariance. + * P(x1|x0,m1) has different means and same covariance. * * Converting to a factor graph gives us * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) @@ -490,7 +490,7 @@ TEST(GaussianMixtureFactor, TwoStateModel) { /** * Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). * - * P(f01|x1,x0,m1) has different means and different covariances. + * P(x1|x0,m1) has different means and different covariances. * * Converting to a factor graph gives us * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) @@ -612,7 +612,6 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( std::make_shared>(X(0), X(1), means[0], model0); auto f1 = std::make_shared>(X(0), X(1), means[1], model1); - std::vector factors{f0, f1}; /// Get terms for each p^m(z1 | x1, x2) Matrix H0_1, H0_2, H1_1, H1_2; @@ -635,11 +634,10 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( std::make_shared(terms1, 1, -d1, model1)}); gtsam::HybridBayesNet bn; bn.emplace_back(gm); - // bn.print(); // Create FG via toFactorGraph gtsam::VectorValues measurements; - measurements.insert(Z(1), gtsam::I_1x1 * z1); // Set Z1 = 0 + measurements.insert(Z(1), gtsam::I_1x1 * z1); // Set Z1 HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); // Linearized prior factor on X1 @@ -653,11 +651,11 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( /** * @brief Test components with differing means. 
* - * We specify a hybrid Bayes network P(Z | X, M) =P(X1)P(Z1 | X1, X2, M1), + * We specify a hybrid Bayes network P(Z | X, M) = P(X1)P(Z1 | X1, X2, M1), * which is then converted to a factor graph by specifying Z1. * This is a different case since now we have a hybrid factor * with 2 continuous variables ϕ(x1, x2, m1). - * p(Z1 | X1, X2, M1) has 2 factors each for the binary + * P(Z1 | X1, X2, M1) has 2 factors each for the binary * mode m1, with only the means being different. */ TEST(GaussianMixtureFactor, DifferentMeans) { @@ -686,18 +684,16 @@ TEST(GaussianMixtureFactor, DifferentMeans) { EXPECT(assert_equal(expected, actual)); { - DiscreteValues dv{{M(1), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); + DiscreteValues dv0{{M(1), 0}}; + VectorValues cont0 = bn->optimize(dv0); + double error0 = bn->error(HybridValues(cont0, dv0)); // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); + EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9); + + DiscreteValues dv1{{M(1), 1}}; + VectorValues cont1 = bn->optimize(dv1); + double error1 = bn->error(HybridValues(cont1, dv1)); + EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9); } } { @@ -713,10 +709,10 @@ TEST(GaussianMixtureFactor, DifferentMeans) { auto bn = hfg.eliminateSequential(); HybridValues actual = bn->optimize(); + // regression HybridValues expected( VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(0.25)}}, DiscreteValues{{M(1), 1}}); - EXPECT(assert_equal(expected, actual)); { @@ -777,6 +773,19 @@ TEST(GaussianMixtureFactor, DifferentCovariances) { EXPECT(assert_equal(expected_m1, actual_m1)); } +namespace test_direct_factor_graph { +/** + * @brief Create a Factor Graph by directly specifying all + * the factors instead of creating conditionals first. 
+ * This way we can directly provide the likelihoods and + * then perform linearization. + * + * @param values Initial values to linearize around. + * @param mus The means of the GaussianMixtureFactor components. + * @param sigmas The covariances of the GaussianMixtureFactor components. + * @param m1 The discrete key. + * @return HybridGaussianFactorGraph + */ HybridGaussianFactorGraph CreateFactorGraph(const gtsam::Values &values, const std::vector &mus, const std::vector &sigmas, @@ -806,8 +815,19 @@ HybridGaussianFactorGraph CreateFactorGraph(const gtsam::Values &values, return hfg; } +} // namespace test_direct_factor_graph +/* ************************************************************************* */ +/** + * @brief Test components with differing means but the same covariances. + * The factor graph is + * *-X1-*-X2 + * | + * M1 + */ TEST(GaussianMixtureFactor, DifferentMeansFG) { + using namespace test_direct_factor_graph; + DiscreteKey m1(M(1), 2); Values values; @@ -878,13 +898,15 @@ TEST(GaussianMixtureFactor, DifferentMeansFG) { /* ************************************************************************* */ /** - * @brief Test components with differing covariances. + * @brief Test components with differing covariances but the same means. 
* The factor graph is * *-X1-*-X2 * | * M1 */ TEST(GaussianMixtureFactor, DifferentCovariancesFG) { + using namespace test_direct_factor_graph; + DiscreteKey m1(M(1), 2); Values values; From 615c04ae4132b511498fa990ec404a3c87c98a4d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 6 Sep 2024 09:22:25 -0400 Subject: [PATCH 185/398] some more refactor and remove redundant test --- .../tests/testGaussianMixtureFactor.cpp | 156 ++++++------------ 1 file changed, 55 insertions(+), 101 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index fc832d704..21d3fbf83 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -599,7 +599,7 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { * @param z1 The measurement value. * @return HybridGaussianFactorGraph */ -HybridGaussianFactorGraph GetFactorGraphFromBayesNet( +static HybridGaussianFactorGraph GetFactorGraphFromBayesNet( const gtsam::Values &values, const std::vector &means, const std::vector &sigmas, DiscreteKey &m1, double z1 = 0.0) { // Noise models @@ -649,16 +649,19 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( /* ************************************************************************* */ /** - * @brief Test components with differing means. + * @brief Test Hybrid Factor Graph. * * We specify a hybrid Bayes network P(Z | X, M) = P(X1)P(Z1 | X1, X2, M1), * which is then converted to a factor graph by specifying Z1. - * This is a different case since now we have a hybrid factor - * with 2 continuous variables ϕ(x1, x2, m1). - * P(Z1 | X1, X2, M1) has 2 factors each for the binary - * mode m1, with only the means being different. + * This is different from the TwoStateModel version since + * we use a factor with 2 continuous variables ϕ(x1, x2, m1) + * directly instead of a conditional. + * This serves as a good sanity check. 
+ * + * P(Z1 | X1, X2, M1) has 2 conditionals each for the binary + * mode m1. */ -TEST(GaussianMixtureFactor, DifferentMeans) { +TEST(GaussianMixtureFactor, FactorGraphFromBayesNet) { DiscreteKey m1(M(1), 2); Values values; @@ -683,18 +686,16 @@ TEST(GaussianMixtureFactor, DifferentMeans) { EXPECT(assert_equal(expected, actual)); - { - DiscreteValues dv0{{M(1), 0}}; - VectorValues cont0 = bn->optimize(dv0); - double error0 = bn->error(HybridValues(cont0, dv0)); - // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9); + DiscreteValues dv0{{M(1), 0}}; + VectorValues cont0 = bn->optimize(dv0); + double error0 = bn->error(HybridValues(cont0, dv0)); + // regression + EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9); - DiscreteValues dv1{{M(1), 1}}; - VectorValues cont1 = bn->optimize(dv1); - double error1 = bn->error(HybridValues(cont1, dv1)); - EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9); - } + DiscreteValues dv1{{M(1), 1}}; + VectorValues cont1 = bn->optimize(dv1); + double error1 = bn->error(HybridValues(cont1, dv1)); + EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9); } { // If we add a measurement on X2, we have more information to work with. 
@@ -715,64 +716,20 @@ TEST(GaussianMixtureFactor, DifferentMeans) { DiscreteValues{{M(1), 1}}); EXPECT(assert_equal(expected, actual)); - { - DiscreteValues dv{{M(1), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9); - } + DiscreteValues dv0{{M(1), 0}}; + VectorValues cont0 = bn->optimize(dv0); + // regression + EXPECT_DOUBLES_EQUAL(2.12692448787, bn->error(HybridValues(cont0, dv0)), + 1e-9); + + DiscreteValues dv1{{M(1), 1}}; + VectorValues cont1 = bn->optimize(dv1); + // regression + EXPECT_DOUBLES_EQUAL(0.126928487854, bn->error(HybridValues(cont1, dv1)), + 1e-9); } } -/* ************************************************************************* */ -/** - * @brief Test components with differing covariances - * but with a Bayes net P(Z|X, M) converted to a FG. - * Same as the DifferentMeans example but in this case, - * we keep the means the same and vary the covariances. - */ -TEST(GaussianMixtureFactor, DifferentCovariances) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 1.0, x2 = 1.0; - values.insert(X(0), x1); - values.insert(X(1), x2); - - std::vector means{0.0, 0.0}, sigmas{1e2, 1e-2}; - HybridGaussianFactorGraph mixture_fg = - GetFactorGraphFromBayesNet(values, means, sigmas, m1); - - auto hbn = mixture_fg.eliminateSequential(); - - VectorValues cv; - cv.insert(X(0), Vector1(0.0)); - cv.insert(X(1), Vector1(0.0)); - - // Check that the error values at the MLE point μ. 
- AlgebraicDecisionTree errorTree = hbn->errorTree(cv); - - DiscreteValues dv0{{M(1), 0}}; - DiscreteValues dv1{{M(1), 1}}; - - // regression - EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); - EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); - - DiscreteConditional expected_m1(m1, "0.5/0.5"); - DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); - - EXPECT(assert_equal(expected_m1, actual_m1)); -} - namespace test_direct_factor_graph { /** * @brief Create a Factor Graph by directly specifying all @@ -781,23 +738,24 @@ namespace test_direct_factor_graph { * then perform linearization. * * @param values Initial values to linearize around. - * @param mus The means of the GaussianMixtureFactor components. + * @param means The means of the GaussianMixtureFactor components. * @param sigmas The covariances of the GaussianMixtureFactor components. * @param m1 The discrete key. * @return HybridGaussianFactorGraph */ -HybridGaussianFactorGraph CreateFactorGraph(const gtsam::Values &values, - const std::vector &mus, - const std::vector &sigmas, - DiscreteKey &m1) { +static HybridGaussianFactorGraph CreateFactorGraph( + const gtsam::Values &values, const std::vector &means, + const std::vector &sigmas, DiscreteKey &m1) { auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - auto f0 = std::make_shared>(X(0), X(1), mus[0], model0) - ->linearize(values); - auto f1 = std::make_shared>(X(0), X(1), mus[1], model1) - ->linearize(values); + auto f0 = + std::make_shared>(X(0), X(1), means[0], model0) + ->linearize(values); + auto f1 = + std::make_shared>(X(0), X(1), means[1], model1) + ->linearize(values); // Create GaussianMixtureFactor std::vector factors{f0, f1}; @@ -835,9 +793,9 @@ TEST(GaussianMixtureFactor, DifferentMeansFG) { values.insert(X(0), x1); values.insert(X(1), x2); - std::vector mus = {0.0, 2.0}, sigmas = {1e-0, 1e-0}; 
+ std::vector means = {0.0, 2.0}, sigmas = {1e-0, 1e-0}; - HybridGaussianFactorGraph hfg = CreateFactorGraph(values, mus, sigmas, m1); + HybridGaussianFactorGraph hfg = CreateFactorGraph(values, means, sigmas, m1); { auto bn = hfg.eliminateSequential(); @@ -849,26 +807,22 @@ TEST(GaussianMixtureFactor, DifferentMeansFG) { EXPECT(assert_equal(expected, actual)); - { - DiscreteValues dv{{M(1), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); - } + DiscreteValues dv0{{M(1), 0}}; + VectorValues cont0 = bn->optimize(dv0); + double error0 = bn->error(HybridValues(cont0, dv0)); + // regression + EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); + + DiscreteValues dv1{{M(1), 1}}; + VectorValues cont1 = bn->optimize(dv1); + double error1 = bn->error(HybridValues(cont1, dv1)); + EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9); } { auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); hfg.push_back( - PriorFactor(X(1), mus[1], prior_noise).linearize(values)); + PriorFactor(X(1), means[1], prior_noise).linearize(values)); auto bn = hfg.eliminateSequential(); HybridValues actual = bn->optimize(); @@ -914,11 +868,11 @@ TEST(GaussianMixtureFactor, DifferentCovariancesFG) { values.insert(X(0), x1); values.insert(X(1), x2); - std::vector mus = {0.0, 0.0}, sigmas = {1e2, 1e-2}; + std::vector means = {0.0, 0.0}, sigmas = {1e2, 1e-2}; // Create FG with GaussianMixtureFactor and prior on X1 HybridGaussianFactorGraph mixture_fg = - CreateFactorGraph(values, mus, sigmas, m1); + CreateFactorGraph(values, means, sigmas, m1); auto hbn = mixture_fg.eliminateSequential(); From 36c0b931a4aed5e1593575468641816ffdc02398 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 6 Sep 2024 
10:46:50 -0400 Subject: [PATCH 186/398] replace emplace_back with emplace_shared and make shared_ptr const& --- gtsam/hybrid/HybridBayesNet.h | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 9876e6fc1..8fae4061d 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -98,22 +98,23 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { * @param conditional The conditional as a shared pointer. */ template - void push_back(std::shared_ptr conditional) { + void push_back(const std::shared_ptr &conditional) { factors_.push_back(std::make_shared(conditional)); } /** - * Preferred: add a conditional directly using a pointer. + * Preferred: Emplace a conditional directly using arguments. * * Examples: - * hbn.emplace_back(new GaussianMixture(...))); - * hbn.emplace_back(new GaussianConditional(...))); - * hbn.emplace_back(new DiscreteConditional(...))); + * hbn.emplace_shared(...))); + * hbn.emplace_shared(...))); + * hbn.emplace_shared(...))); */ - template - void emplace_back(Conditional *conditional) { - factors_.push_back(std::make_shared( - std::shared_ptr(conditional))); + template + void emplace_shared(Args &&...args) { + auto cond = std::allocate_shared( + Eigen::aligned_allocator(), std::forward(args)...); + factors_.push_back(std::make_shared(std::move(cond))); } /** From 605542bd0c461da91bc66e4b3e6fc47c83108fac Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 6 Sep 2024 10:46:58 -0400 Subject: [PATCH 187/398] update all tests to use emplace_shared --- gtsam/hybrid/tests/TinyHybridExample.h | 17 ++-- .../tests/testGaussianMixtureFactor.cpp | 37 ++++----- gtsam/hybrid/tests/testHybridBayesNet.cpp | 22 +++--- gtsam/hybrid/tests/testHybridEstimation.cpp | 24 +++--- .../tests/testHybridGaussianFactorGraph.cpp | 79 ++++++++++--------- 5 files changed, 94 insertions(+), 85 deletions(-) diff --git 
a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index 39a1a1a9e..26b83db29 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -43,12 +43,12 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1, // Create Gaussian mixture z_i = x0 + noise for each measurement. for (size_t i = 0; i < num_measurements; i++) { const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode; - bayesNet.emplace_back( - new GaussianMixture({Z(i)}, {X(0)}, {mode_i}, - {GaussianConditional::sharedMeanAndStddev( - Z(i), I_1x1, X(0), Z_1x1, 0.5), - GaussianConditional::sharedMeanAndStddev( - Z(i), I_1x1, X(0), Z_1x1, 3)})); + bayesNet.emplace_shared( + KeyVector{Z(i)}, KeyVector{X(0)}, DiscreteKeys{mode_i}, + std::vector{GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), + Z_1x1, 0.5), + GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), + Z_1x1, 3)}); } // Create prior on X(0). @@ -58,7 +58,7 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1, // Add prior on mode. const size_t nrModes = manyModes ? 
num_measurements : 1; for (size_t i = 0; i < nrModes; i++) { - bayesNet.emplace_back(new DiscreteConditional({M(i), 2}, "4/6")); + bayesNet.emplace_shared(DiscreteKey{M(i), 2}, "4/6"); } return bayesNet; } @@ -70,8 +70,7 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1, * the generative Bayes net model HybridBayesNet::Example(num_measurements) */ inline HybridGaussianFactorGraph createHybridGaussianFactorGraph( - size_t num_measurements = 1, - std::optional measurements = {}, + size_t num_measurements = 1, std::optional measurements = {}, bool manyModes = false) { auto bayesNet = createHybridBayesNet(num_measurements, manyModes); if (measurements) { diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 5ec600aa8..fcd9dd08f 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -227,12 +227,12 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, auto c0 = make_shared(z, Vector1(mu0), I_1x1, model0), c1 = make_shared(z, Vector1(mu1), I_1x1, model1); - auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1}); - - auto mixing = make_shared(m, "0.5/0.5"); HybridBayesNet hbn; - hbn.emplace_back(gm); + hbn.emplace_shared(KeyVector{z}, KeyVector{}, + DiscreteKeys{m}, std::vector{c0, c1}); + + auto mixing = make_shared(m, "0.5/0.5"); hbn.push_back(mixing); return hbn; @@ -278,7 +278,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { // At the halfway point between the means, we should get P(m|z)=0.5 HybridBayesNet expected; - expected.emplace_back(new DiscreteConditional(m, "0.5/0.5")); + expected.emplace_shared(m, "0.5/0.5"); EXPECT(assert_equal(expected, *bn)); } @@ -350,10 +350,10 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) { // At the halfway point between the means HybridBayesNet expected; - expected.emplace_back(new DiscreteConditional( - m, {}, + expected.emplace_shared( + m, 
DiscreteKeys{}, vector{prob_m_z(mu1, mu0, sigma1, sigma0, m1_high), - prob_m_z(mu0, mu1, sigma0, sigma1, m1_high)})); + prob_m_z(mu0, mu1, sigma0, sigma1, m1_high)}); EXPECT(assert_equal(expected, *bn)); } @@ -401,9 +401,9 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, auto measurement_model = noiseModel::Isotropic::Sigma(1, measurement_sigma); // Add measurement P(z0 | x0) - auto p_z0 = new GaussianConditional(z0, Vector1(0.0), -I_1x1, x0, I_1x1, - measurement_model); - hbn.emplace_back(p_z0); + auto p_z0 = std::make_shared( + z0, Vector1(0.0), -I_1x1, x0, I_1x1, measurement_model); + hbn.push_back(p_z0); // Add hybrid motion model auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); @@ -413,19 +413,20 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, c1 = make_shared(x1, Vector1(mu1), I_1x1, x0, -I_1x1, model1); - auto motion = new GaussianMixture({x1}, {x0}, {m1}, {c0, c1}); - hbn.emplace_back(motion); + auto motion = std::make_shared( + KeyVector{x1}, KeyVector{x0}, DiscreteKeys{m1}, std::vector{c0, c1}); + hbn.push_back(motion); if (add_second_measurement) { // Add second measurement - auto p_z1 = new GaussianConditional(z1, Vector1(0.0), -I_1x1, x1, I_1x1, - measurement_model); - hbn.emplace_back(p_z1); + auto p_z1 = std::make_shared( + z1, Vector1(0.0), -I_1x1, x1, I_1x1, measurement_model); + hbn.push_back(p_z1); } // Discrete uniform prior. - auto p_m1 = new DiscreteConditional(m1, "0.5/0.5"); - hbn.emplace_back(p_m1); + auto p_m1 = std::make_shared(m1, "0.5/0.5"); + hbn.push_back(p_m1); return hbn; } diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 00dc36cd0..3a7e008d8 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -43,7 +43,7 @@ static const DiscreteKey Asia(asiaKey, 2); // Test creation of a pure discrete Bayes net. 
TEST(HybridBayesNet, Creation) { HybridBayesNet bayesNet; - bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1")); + bayesNet.emplace_shared(Asia, "99/1"); DiscreteConditional expected(Asia, "99/1"); CHECK(bayesNet.at(0)->asDiscrete()); @@ -54,7 +54,7 @@ TEST(HybridBayesNet, Creation) { // Test adding a Bayes net to another one. TEST(HybridBayesNet, Add) { HybridBayesNet bayesNet; - bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1")); + bayesNet.emplace_shared(Asia, "99/1"); HybridBayesNet other; other.add(bayesNet); @@ -65,7 +65,7 @@ TEST(HybridBayesNet, Add) { // Test evaluate for a pure discrete Bayes net P(Asia). TEST(HybridBayesNet, EvaluatePureDiscrete) { HybridBayesNet bayesNet; - bayesNet.emplace_back(new DiscreteConditional(Asia, "4/6")); + bayesNet.emplace_shared(Asia, "4/6"); HybridValues values; values.insert(asiaKey, 0); EXPECT_DOUBLES_EQUAL(0.4, bayesNet.evaluate(values), 1e-9); @@ -107,9 +107,10 @@ TEST(HybridBayesNet, evaluateHybrid) { // Create hybrid Bayes net. HybridBayesNet bayesNet; bayesNet.push_back(continuousConditional); - bayesNet.emplace_back( - new GaussianMixture({X(1)}, {}, {Asia}, {conditional0, conditional1})); - bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1")); + bayesNet.emplace_shared( + KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia}, + std::vector{conditional0, conditional1}); + bayesNet.emplace_shared(Asia, "99/1"); // Create values at which to evaluate. HybridValues values; @@ -167,13 +168,14 @@ TEST(HybridBayesNet, Error) { conditional1 = std::make_shared( X(1), Vector1::Constant(2), I_1x1, model1); - auto gm = - new GaussianMixture({X(1)}, {}, {Asia}, {conditional0, conditional1}); + auto gm = std::make_shared( + KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia}, + std::vector{conditional0, conditional1}); // Create hybrid Bayes net. 
HybridBayesNet bayesNet; bayesNet.push_back(continuousConditional); - bayesNet.emplace_back(gm); - bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1")); + bayesNet.push_back(gm); + bayesNet.emplace_shared(Asia, "99/1"); // Create values at which to evaluate. HybridValues values; diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 1cc28b386..bdc298762 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -616,12 +616,12 @@ TEST(HybridEstimation, ModeSelection) { GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1)); bn.push_back( GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1)); - bn.emplace_back(new GaussianMixture( - {Z(0)}, {X(0), X(1)}, {mode}, - {GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1), - Z_1x1, noise_loose), - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1), - Z_1x1, noise_tight)})); + bn.emplace_shared( + KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode}, + std::vector{GaussianConditional::sharedMeanAndStddev( + Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_loose), + GaussianConditional::sharedMeanAndStddev( + Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_tight)}); VectorValues vv; vv.insert(Z(0), Z_1x1); @@ -647,12 +647,12 @@ TEST(HybridEstimation, ModeSelection2) { GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1)); bn.push_back( GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1)); - bn.emplace_back(new GaussianMixture( - {Z(0)}, {X(0), X(1)}, {mode}, - {GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1), - Z_3x1, noise_loose), - GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1), - Z_3x1, noise_tight)})); + bn.emplace_shared( + KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode}, + 
std::vector{GaussianConditional::sharedMeanAndStddev( + Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_loose), + GaussianConditional::sharedMeanAndStddev( + Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_tight)}); VectorValues vv; vv.insert(Z(0), Z_3x1); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 559f59c8b..a7a315c87 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -651,7 +651,8 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) { } /* ****************************************************************************/ -// Test hybrid gaussian factor graph errorTree when there is a HybridConditional in the graph +// Test hybrid gaussian factor graph errorTree when +// there is a HybridConditional in the graph TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) { using symbol_shorthand::F; @@ -665,12 +666,11 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) { auto measurement_model = noiseModel::Isotropic::Sigma(1, 2.0); // Set a prior P(x0) at x0=0 - hbn.emplace_back( - new GaussianConditional(x0, Vector1(0.0), I_1x1, prior_model)); + hbn.emplace_shared(x0, Vector1(0.0), I_1x1, prior_model); // Add measurement P(z0 | x0) - hbn.emplace_back(new GaussianConditional(z0, Vector1(0.0), -I_1x1, x0, I_1x1, - measurement_model)); + hbn.emplace_shared(z0, Vector1(0.0), -I_1x1, x0, I_1x1, + measurement_model); // Add hybrid motion model double mu = 0.0; @@ -681,10 +681,11 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) { x0, -I_1x1, model0), c1 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, x0, -I_1x1, model1); - hbn.emplace_back(new GaussianMixture({f01}, {x0, x1}, {m1}, {c0, c1})); + hbn.emplace_shared(KeyVector{f01}, KeyVector{x0, x1}, + DiscreteKeys{m1}, std::vector{c0, c1}); // Discrete uniform prior. 
- hbn.emplace_back(new DiscreteConditional(m1, "0.5/0.5")); + hbn.emplace_shared(m1, "0.5/0.5"); VectorValues given; given.insert(z0, Vector1(0.0)); @@ -804,11 +805,12 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { X(0), Vector1(14.1421), I_1x1 * 2.82843), conditional1 = std::make_shared( X(0), Vector1(10.1379), I_1x1 * 2.02759); - expectedBayesNet.emplace_back( - new GaussianMixture({X(0)}, {}, {mode}, {conditional0, conditional1})); + expectedBayesNet.emplace_shared( + KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, + std::vector{conditional0, conditional1}); // Add prior on mode. - expectedBayesNet.emplace_back(new DiscreteConditional(mode, "74/26")); + expectedBayesNet.emplace_shared(mode, "74/26"); // Test elimination const auto posterior = fg.eliminateSequential(); @@ -828,18 +830,20 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { HybridBayesNet bn; // Create Gaussian mixture z_0 = x0 + noise for each measurement. - bn.emplace_back(new GaussianMixture( - {Z(0)}, {X(0)}, {mode}, - {GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3), - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, - 0.5)})); + auto gm = std::make_shared( + KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode}, + std::vector{ + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3), + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, + 0.5)}); + bn.push_back(gm); // Create prior on X(0). bn.push_back( GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5)); // Add prior on mode. 
- bn.emplace_back(new DiscreteConditional(mode, "1/1")); + bn.emplace_shared(mode, "1/1"); // bn.print(); auto fg = bn.toFactorGraph(measurements); @@ -858,11 +862,12 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { X(0), Vector1(10.1379), I_1x1 * 2.02759), conditional1 = std::make_shared( X(0), Vector1(14.1421), I_1x1 * 2.82843); - expectedBayesNet.emplace_back( - new GaussianMixture({X(0)}, {}, {mode}, {conditional0, conditional1})); + expectedBayesNet.emplace_shared( + KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, + std::vector{conditional0, conditional1}); // Add prior on mode. - expectedBayesNet.emplace_back(new DiscreteConditional(mode, "1/1")); + expectedBayesNet.emplace_shared(mode, "1/1"); // Test elimination const auto posterior = fg.eliminateSequential(); @@ -894,11 +899,12 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) { X(0), Vector1(17.3205), I_1x1 * 3.4641), conditional1 = std::make_shared( X(0), Vector1(10.274), I_1x1 * 2.0548); - expectedBayesNet.emplace_back( - new GaussianMixture({X(0)}, {}, {mode}, {conditional0, conditional1})); + expectedBayesNet.emplace_shared( + KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, + std::vector{conditional0, conditional1}); // Add prior on mode. 
- expectedBayesNet.emplace_back(new DiscreteConditional(mode, "23/77")); + expectedBayesNet.emplace_shared(mode, "23/77"); // Test elimination const auto posterior = fg.eliminateSequential(); @@ -940,30 +946,31 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { for (size_t t : {0, 1, 2}) { // Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t): const auto noise_mode_t = DiscreteKey{N(t), 2}; - bn.emplace_back( - new GaussianMixture({Z(t)}, {X(t)}, {noise_mode_t}, - {GaussianConditional::sharedMeanAndStddev( - Z(t), I_1x1, X(t), Z_1x1, 0.5), - GaussianConditional::sharedMeanAndStddev( - Z(t), I_1x1, X(t), Z_1x1, 3.0)})); + bn.emplace_shared( + KeyVector{Z(t)}, KeyVector{X(t)}, DiscreteKeys{noise_mode_t}, + std::vector{GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), + Z_1x1, 0.5), + GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), + Z_1x1, 3.0)}); // Create prior on discrete mode N(t): - bn.emplace_back(new DiscreteConditional(noise_mode_t, "20/80")); + bn.emplace_shared(noise_mode_t, "20/80"); } // Add motion models: for (size_t t : {2, 1}) { // Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1): const auto motion_model_t = DiscreteKey{M(t), 2}; - bn.emplace_back( - new GaussianMixture({X(t)}, {X(t - 1)}, {motion_model_t}, - {GaussianConditional::sharedMeanAndStddev( - X(t), I_1x1, X(t - 1), Z_1x1, 0.2), - GaussianConditional::sharedMeanAndStddev( - X(t), I_1x1, X(t - 1), I_1x1, 0.2)})); + auto gm = std::make_shared( + KeyVector{X(t)}, KeyVector{X(t - 1)}, DiscreteKeys{motion_model_t}, + std::vector{GaussianConditional::sharedMeanAndStddev( + X(t), I_1x1, X(t - 1), Z_1x1, 0.2), + GaussianConditional::sharedMeanAndStddev( + X(t), I_1x1, X(t - 1), I_1x1, 0.2)}); + bn.push_back(gm); // Create prior on motion model M(t): - bn.emplace_back(new DiscreteConditional(motion_model_t, "40/60")); + bn.emplace_shared(motion_model_t, "40/60"); } // Create Gaussian prior on continuous X(0) using 
sharedMeanAndStddev: From 9dc29e008bb9755836a4140be4656dd650691f77 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 6 Sep 2024 11:20:38 -0400 Subject: [PATCH 188/398] fix test --- gtsam/hybrid/tests/testGaussianMixtureFactor.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 21d3fbf83..7452376c9 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -811,7 +811,7 @@ TEST(GaussianMixtureFactor, DifferentMeansFG) { VectorValues cont0 = bn->optimize(dv0); double error0 = bn->error(HybridValues(cont0, dv0)); // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); + EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9); DiscreteValues dv1{{M(1), 1}}; VectorValues cont1 = bn->optimize(dv1); From 98cd11d38887a800114c7eb413299c2c474a1538 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 6 Sep 2024 12:24:26 -0400 Subject: [PATCH 189/398] remove gcc9+tbb CI for python wrapper since it takes far too long --- .github/workflows/build-python.yml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/.github/workflows/build-python.yml b/.github/workflows/build-python.yml index cb465309e..57b4435af 100644 --- a/.github/workflows/build-python.yml +++ b/.github/workflows/build-python.yml @@ -29,7 +29,6 @@ jobs: name: [ ubuntu-20.04-gcc-9, - ubuntu-20.04-gcc-9-tbb, ubuntu-20.04-clang-9, macos-12-xcode-14.2, macos-14-xcode-15.4, @@ -44,12 +43,6 @@ jobs: compiler: gcc version: "9" - - name: ubuntu-20.04-gcc-9-tbb - os: ubuntu-20.04 - compiler: gcc - version: "9" - flag: tbb - - name: ubuntu-20.04-clang-9 os: ubuntu-20.04 compiler: clang From 24ec30ea4fe3e6640055bcb7a9424d966d01d591 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 6 Sep 2024 17:32:37 -0400 Subject: [PATCH 190/398] replace emplace_back with emplace_shared --- gtsam/hybrid/tests/testGaussianMixtureFactor.cpp 
| 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index c771edf85..7f6c12979 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -629,12 +629,12 @@ static HybridGaussianFactorGraph GetFactorGraphFromBayesNet( {X(0), H1_1 /*Sp1*/}, {X(1), H1_2 /*Tp2*/}}; // Create conditional P(Z1 | X1, X2, M1) - auto gm = new gtsam::GaussianMixture( - {Z(1)}, {X(0), X(1)}, {m1}, - {std::make_shared(terms0, 1, -d0, model0), - std::make_shared(terms1, 1, -d1, model1)}); + auto conditionals = std::vector{ + std::make_shared(terms0, 1, -d0, model0), + std::make_shared(terms1, 1, -d1, model1)}; gtsam::HybridBayesNet bn; - bn.emplace_back(gm); + bn.emplace_shared(KeyVector{Z(1)}, KeyVector{X(0), X(1)}, + DiscreteKeys{m1}, conditionals); // Create FG via toFactorGraph gtsam::VectorValues measurements; From 28eae9af3919aafaa6b2afd1cb5d88bbb324a633 Mon Sep 17 00:00:00 2001 From: Akshay Krishnan Date: Sun, 8 Sep 2024 14:55:10 -0400 Subject: [PATCH 191/398] add the bata translation factor, with unit tests --- gtsam/sfm/TranslationFactor.h | 76 ++++- gtsam/sfm/TranslationRecovery.cpp | 18 +- gtsam/sfm/TranslationRecovery.h | 8 +- gtsam/sfm/sfm.i | 7 +- gtsam/sfm/tests/testTranslationFactor.cpp | 75 ++++- tests/testTranslationRecovery.cpp | 334 ++++++++++++++++++++++ 6 files changed, 506 insertions(+), 12 deletions(-) diff --git a/gtsam/sfm/TranslationFactor.h b/gtsam/sfm/TranslationFactor.h index 8850d7d2d..ccd0b03ff 100644 --- a/gtsam/sfm/TranslationFactor.h +++ b/gtsam/sfm/TranslationFactor.h @@ -18,6 +18,7 @@ * @brief Binary factor for a relative translation direction measurement. 
*/ +#include #include #include #include @@ -36,8 +37,6 @@ namespace gtsam { * normalized(Tb - Ta) - w_aZb.point3() * * @ingroup sfm - * - * */ class TranslationFactor : public NoiseModelFactorN { private: @@ -56,7 +55,7 @@ class TranslationFactor : public NoiseModelFactorN { * @brief Caclulate error: (norm(Tb - Ta) - measurement) * where Tb and Ta are Point3 translations and measurement is * the Unit3 translation direction from a to b. - * + * * @param Ta translation for key a * @param Tb translation for key b * @param H1 optional jacobian in Ta @@ -69,7 +68,8 @@ class TranslationFactor : public NoiseModelFactorN { boost::optional H2 = boost::none) const override { const Point3 dir = Tb - Ta; Matrix33 H_predicted_dir; - const Point3 predicted = normalize(dir, H1 || H2 ? &H_predicted_dir : nullptr); + const Point3 predicted = + normalize(dir, H1 || H2 ? &H_predicted_dir : nullptr); if (H1) *H1 = -H_predicted_dir; if (H2) *H2 = H_predicted_dir; return predicted - measured_w_aZb_; @@ -83,4 +83,72 @@ class TranslationFactor : public NoiseModelFactorN { "Base", boost::serialization::base_object(*this)); } }; // \ TranslationFactor + +/** + * Binary factor for a relative translation direction measurement + * w_aZb. The measurement equation is + * w_aZb = scale * (Tb - Ta) + * i.e., w_aZb is the translation direction from frame A to B, in world + * coordinates. + * + * Instead of normalizing the Tb - Ta vector, we multiply it by a scalar + * which is also jointly optimized. This is inspired by the work on + * BATA (Zhuang et al, CVPR 2018). 
+ * + * @ingroup sfm + */ +class BilinearAngleTranslationFactor + : public NoiseModelFactorN { + private: + typedef NoiseModelFactorN Base; + Point3 measured_w_aZb_; + + public: + /// default constructor + BilinearAngleTranslationFactor() {} + + BilinearAngleTranslationFactor(Key a, Key b, Key scale_key, const Unit3& w_aZb, + const SharedNoiseModel& noiseModel) + : Base(noiseModel, a, b, scale_key), + measured_w_aZb_(w_aZb.point3()) {} + + /** + * @brief Caclulate error: (scale * (Tb - Ta) - measurement) + * where Tb and Ta are Point3 translations and measurement is + * the Unit3 translation direction from a to b. + * + * @param Ta translation for key a + * @param Tb translation for key b + * @param H1 optional jacobian in Ta + * @param H2 optional jacobian in Tb + * @return * Vector + */ + Vector evaluateError( + const Point3& Ta, const Point3& Tb, const Vector1& scale, + boost::optional H1 = boost::none, + boost::optional H2 = boost::none, + boost::optional H3 = boost::none) const override { + // Ideally we should use a positive real valued scalar datatype for scale. + const double abs_scale = abs(scale[0]); + const Point3 predicted = (Tb - Ta) * abs_scale; + if (H1) { + *H1 = -Matrix3::Identity() * abs_scale; + } + if (H2) { + *H2 = Matrix3::Identity() * abs_scale; + } + if (H3) { + *H3 = scale[0] >= 0 ? 
(Tb - Ta) : (Ta - Tb); + } + return predicted - measured_w_aZb_; + } + + private: + friend class boost::serialization::access; + template + void serialize(ARCHIVE& ar, const unsigned int /*version*/) { + ar& boost::serialization::make_nvp( + "Base", boost::serialization::base_object(*this)); + } +}; // \ BilinearAngleTranslationFactor } // namespace gtsam diff --git a/gtsam/sfm/TranslationRecovery.cpp b/gtsam/sfm/TranslationRecovery.cpp index c7ef2e526..4c7bb24da 100644 --- a/gtsam/sfm/TranslationRecovery.cpp +++ b/gtsam/sfm/TranslationRecovery.cpp @@ -101,9 +101,17 @@ NonlinearFactorGraph TranslationRecovery::buildGraph( NonlinearFactorGraph graph; // Add translation factors for input translation directions. + uint i = 0; for (auto edge : relativeTranslations) { - graph.emplace_shared(edge.key1(), edge.key2(), - edge.measured(), edge.noiseModel()); + if (use_bilinear_translation_factor_) { + graph.emplace_shared( + edge.key1(), edge.key2(), Symbol('S', i), edge.measured(), + edge.noiseModel()); + } else { + graph.emplace_shared( + edge.key1(), edge.key2(), edge.measured(), edge.noiseModel()); + } + i++; } return graph; } @@ -163,6 +171,12 @@ Values TranslationRecovery::initializeRandomly( insert(edge.key1()); insert(edge.key2()); } + + if (use_bilinear_translation_factor_) { + for (uint i = 0; i < relativeTranslations.size(); i++) { + initial.insert(Symbol('S', i), Vector1(1.0)); + } + } return initial; } diff --git a/gtsam/sfm/TranslationRecovery.h b/gtsam/sfm/TranslationRecovery.h index 44a5ef43e..e5b8f106f 100644 --- a/gtsam/sfm/TranslationRecovery.h +++ b/gtsam/sfm/TranslationRecovery.h @@ -60,14 +60,18 @@ class TranslationRecovery { // Parameters. LevenbergMarquardtParams lmParams_; + const bool use_bilinear_translation_factor_ = false; + public: /** * @brief Construct a new Translation Recovery object * * @param lmParams parameters for optimization. 
*/ - TranslationRecovery(const LevenbergMarquardtParams &lmParams) - : lmParams_(lmParams) {} + TranslationRecovery(const LevenbergMarquardtParams &lmParams, + bool use_bilinear_translation_factor) + : lmParams_(lmParams), + use_bilinear_translation_factor_(use_bilinear_translation_factor) {} /** * @brief Default constructor. diff --git a/gtsam/sfm/sfm.i b/gtsam/sfm/sfm.i index c57e03174..abf63b4cf 100644 --- a/gtsam/sfm/sfm.i +++ b/gtsam/sfm/sfm.i @@ -24,7 +24,7 @@ virtual class SfmTrack : gtsam::SfmTrack2d { SfmTrack(); SfmTrack(const gtsam::Point3& pt); const Point3& point3() const; - + Point3 p; double r; @@ -38,8 +38,8 @@ virtual class SfmTrack : gtsam::SfmTrack2d { bool equals(const gtsam::SfmTrack& expected, double tol) const; }; -#include #include +#include #include class SfmData { SfmData(); @@ -288,7 +288,8 @@ class MFAS { #include class TranslationRecovery { - TranslationRecovery(const gtsam::LevenbergMarquardtParams& lmParams); + TranslationRecovery(const gtsam::LevenbergMarquardtParams& lmParams, + const bool use_bilinear_translation_factor); TranslationRecovery(); // default params. void addPrior(const gtsam::BinaryMeasurementsUnit3& relativeTranslations, const double scale, diff --git a/gtsam/sfm/tests/testTranslationFactor.cpp b/gtsam/sfm/tests/testTranslationFactor.cpp index 818f2bce5..b25745692 100644 --- a/gtsam/sfm/tests/testTranslationFactor.cpp +++ b/gtsam/sfm/tests/testTranslationFactor.cpp @@ -30,7 +30,7 @@ using namespace gtsam; static SharedNoiseModel model(noiseModel::Isotropic::Sigma(3, 0.05)); // Keys are deliberately *not* in sorted order to test that case. 
-static const Key kKey1(2), kKey2(1); +static const Key kKey1(2), kKey2(1), kEdgeKey(3); static const Unit3 kMeasured(1, 0, 0); /* ************************************************************************* */ @@ -99,6 +99,79 @@ TEST(TranslationFactor, Jacobian) { EXPECT(assert_equal(H2Expected, H2Actual, 1e-9)); } + +/* ************************************************************************* */ +TEST(BilinearAngleTranslationFactor, Constructor) { + BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model); +} + +/* ************************************************************************* */ +TEST(BilinearAngleTranslationFactor, ZeroError) { + // Create a factor + BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model); + + // Set the linearization + Point3 T1(1, 0, 0), T2(2, 0, 0); + Vector1 scale(1.0); + + // Use the factor to calculate the error + Vector actualError(factor.evaluateError(T1, T2, scale)); + + // Verify we get the expected error + Vector expected = (Vector3() << 0, 0, 0).finished(); + EXPECT(assert_equal(expected, actualError, 1e-9)); +} + +/* ************************************************************************* */ +TEST(BilinearAngleTranslationFactor, NonZeroError) { + // create a factor + BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model); + + // set the linearization + Point3 T1(0, 1, 1), T2(0, 2, 2); + Vector1 scale(1.0 / sqrt(2)); + + // use the factor to calculate the error + Vector actualError(factor.evaluateError(T1, T2, scale)); + + // verify we get the expected error + Vector expected = (Vector3() << -1, 1 / sqrt(2), 1 / sqrt(2)).finished(); + EXPECT(assert_equal(expected, actualError, 1e-9)); +} + +/* ************************************************************************* */ +Vector bilinearAngleFactorError(const Point3 &T1, const Point3 &T2, const Vector1 &scale, + const BilinearAngleTranslationFactor &factor) { + return factor.evaluateError(T1, T2, scale); 
+} + +TEST(BilinearAngleTranslationFactor, Jacobian) { + // Create a factor + BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model); + + // Set the linearization + Point3 T1(1, 0, 0), T2(2, 0, 0); + Vector1 scale(1.0); + + // Use the factor to calculate the Jacobians + Matrix H1Actual, H2Actual, H3Actual; + factor.evaluateError(T1, T2, scale, H1Actual, H2Actual, H3Actual); + + // Use numerical derivatives to calculate the Jacobians + Matrix H1Expected, H2Expected, H3Expected; + H1Expected = numericalDerivative11( + std::bind(&bilinearAngleFactorError, std::placeholders::_1, T2, scale, factor), T1); + H2Expected = numericalDerivative11( + std::bind(&bilinearAngleFactorError, T1, std::placeholders::_1, scale, factor), T2); + H3Expected = numericalDerivative11( + std::bind(&bilinearAngleFactorError, T1, T2, std::placeholders::_1, factor), scale); + + // Verify the Jacobians are correct + EXPECT(assert_equal(H1Expected, H1Actual, 1e-9)); + EXPECT(assert_equal(H2Expected, H2Actual, 1e-9)); + EXPECT(assert_equal(H3Expected, H3Actual, 1e-9)); +} + /* ************************************************************************* */ int main() { TestResult tr; diff --git a/tests/testTranslationRecovery.cpp b/tests/testTranslationRecovery.cpp index 15f1caa1b..6e9c3ed24 100644 --- a/tests/testTranslationRecovery.cpp +++ b/tests/testTranslationRecovery.cpp @@ -353,6 +353,340 @@ TEST(TranslationRecovery, NodeWithBetweenFactorAndNoMeasurements) { EXPECT(assert_equal(Point3(1, 2, 1), result.at(4), 1e-4)); } +/* ************************************************************************* +* Repeat all tests, but with the Bilinear Angle Translation Factor. +* ************************************************************************* */ + + +/* ************************************************************************* */ +// We read the BAL file, which has 3 cameras in it, with poses. 
We then assume +// the rotations are correct, but translations have to be estimated from +// translation directions only. Since we have 3 cameras, A, B, and C, we can at +// most create three relative measurements, let's call them w_aZb, w_aZc, and +// bZc. These will be of type Unit3. We then call `recoverTranslations` which +// sets up an optimization problem for the three unknown translations. +TEST(TranslationRecovery, BALBATA) { + const string filename = findExampleDataFile("dubrovnik-3-7-pre"); + SfmData db = SfmData::FromBalFile(filename); + + // Get camera poses, as Values + size_t j = 0; + Values poses; + for (auto camera : db.cameras) { + poses.insert(j++, camera.pose()); + } + + // Simulate measurements + const auto relativeTranslations = TranslationRecovery::SimulateMeasurements( + poses, {{0, 1}, {0, 2}, {1, 2}}); + + // Check simulated measurements. + for (auto& unitTranslation : relativeTranslations) { + EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation), + unitTranslation.measured())); + } + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + const auto graph = algorithm.buildGraph(relativeTranslations); + EXPECT_LONGS_EQUAL(3, graph.size()); + + // Run translation recovery + const double scale = 2.0; + const auto result = algorithm.run(relativeTranslations, scale); + + // Check result for first two translations, determined by prior + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0))); + EXPECT(assert_equal( + Point3(2 * GetDirectionFromPoses(poses, relativeTranslations[0])), + result.at(1))); + + // Check that the third translations is correct + Point3 Ta = poses.at(0).translation(); + Point3 Tb = poses.at(1).translation(); + Point3 Tc = poses.at(2).translation(); + Point3 expected = (Tc - Ta) * (scale / (Tb - Ta).norm()); + EXPECT(assert_equal(expected, result.at(2), 1e-4)); + + // TODO(frank): how to get stats back? 
+ // EXPECT_DOUBLES_EQUAL(0.0199833, actualError, 1e-5); +} + +TEST(TranslationRecovery, TwoPoseTestBATA) { + // Create a dataset with 2 poses. + // __ __ + // \/ \/ + // 0 _____ 1 + // + // 0 and 1 face in the same direction but have a translation offset. + Values poses; + poses.insert(0, Pose3(Rot3(), Point3(0, 0, 0))); + poses.insert(1, Pose3(Rot3(), Point3(2, 0, 0))); + + auto relativeTranslations = + TranslationRecovery::SimulateMeasurements(poses, {{0, 1}}); + + // Check simulated measurements. + for (auto& unitTranslation : relativeTranslations) { + EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation), + unitTranslation.measured())); + } + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + const auto graph = algorithm.buildGraph(relativeTranslations); + EXPECT_LONGS_EQUAL(1, graph.size()); + + // Run translation recovery + const auto result = algorithm.run(relativeTranslations, /*scale=*/3.0); + + // Check result for first two translations, determined by prior + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0), 1e-8)); + EXPECT(assert_equal(Point3(3, 0, 0), result.at(1), 1e-8)); +} + +TEST(TranslationRecovery, ThreePoseTestBATA) { + // Create a dataset with 3 poses. + // __ __ + // \/ \/ + // 0 _____ 1 + // \ __ / + // \\// + // 3 + // + // 0 and 1 face in the same direction but have a translation offset. 3 is in + // the same direction as 0 and 1, in between 0 and 1, with some Y axis offset. + + Values poses; + poses.insert(0, Pose3(Rot3(), Point3(0, 0, 0))); + poses.insert(1, Pose3(Rot3(), Point3(2, 0, 0))); + poses.insert(3, Pose3(Rot3(), Point3(1, -1, 0))); + + auto relativeTranslations = TranslationRecovery::SimulateMeasurements( + poses, {{0, 1}, {1, 3}, {3, 0}}); + + // Check simulated measurements. 
+ for (auto& unitTranslation : relativeTranslations) { + EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation), + unitTranslation.measured())); + } + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + const auto graph = algorithm.buildGraph(relativeTranslations); + EXPECT_LONGS_EQUAL(3, graph.size()); + + const auto result = algorithm.run(relativeTranslations, /*scale=*/3.0); + + // Check result + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0), 1e-8)); + EXPECT(assert_equal(Point3(3, 0, 0), result.at(1), 1e-8)); + EXPECT(assert_equal(Point3(1.5, -1.5, 0), result.at(3), 1e-8)); +} + +TEST(TranslationRecovery, ThreePosesIncludingZeroTranslationBATA) { + // Create a dataset with 3 poses. + // __ __ + // \/ \/ + // 0 _____ 1 + // 2 <| + // + // 0 and 1 face in the same direction but have a translation offset. 2 is at + // the same point as 1 but is rotated, with little FOV overlap. + Values poses; + poses.insert(0, Pose3(Rot3(), Point3(0, 0, 0))); + poses.insert(1, Pose3(Rot3(), Point3(2, 0, 0))); + poses.insert(2, Pose3(Rot3::RzRyRx(-M_PI / 2, 0, 0), Point3(2, 0, 0))); + + auto relativeTranslations = + TranslationRecovery::SimulateMeasurements(poses, {{0, 1}, {1, 2}}); + + // Check simulated measurements. + for (auto& unitTranslation : relativeTranslations) { + EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation), + unitTranslation.measured())); + } + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + // Run translation recovery + const auto result = algorithm.run(relativeTranslations, /*scale=*/3.0); + + // Check result + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0), 1e-8)); + EXPECT(assert_equal(Point3(3, 0, 0), result.at(1), 1e-8)); + EXPECT(assert_equal(Point3(3, 0, 0), result.at(2), 1e-8)); +} + +TEST(TranslationRecovery, FourPosesIncludingZeroTranslationBATA) { + // Create a dataset with 4 poses. 
+ // __ __ + // \/ \/ + // 0 _____ 1 + // \ __ 2 <| + // \\// + // 3 + // + // 0 and 1 face in the same direction but have a translation offset. 2 is at + // the same point as 1 but is rotated, with very little FOV overlap. 3 is in + // the same direction as 0 and 1, in between 0 and 1, with some Y axis offset. + + Values poses; + poses.insert(0, Pose3(Rot3(), Point3(0, 0, 0))); + poses.insert(1, Pose3(Rot3(), Point3(2, 0, 0))); + poses.insert(2, Pose3(Rot3::RzRyRx(-M_PI / 2, 0, 0), Point3(2, 0, 0))); + poses.insert(3, Pose3(Rot3(), Point3(1, -1, 0))); + + auto relativeTranslations = TranslationRecovery::SimulateMeasurements( + poses, {{0, 1}, {1, 2}, {1, 3}, {3, 0}}); + + // Check simulated measurements. + for (auto& unitTranslation : relativeTranslations) { + EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation), + unitTranslation.measured())); + } + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + + // Run translation recovery + const auto result = algorithm.run(relativeTranslations, /*scale=*/4.0); + + // Check result + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0), 1e-8)); + EXPECT(assert_equal(Point3(4, 0, 0), result.at(1), 1e-8)); + EXPECT(assert_equal(Point3(4, 0, 0), result.at(2), 1e-8)); + EXPECT(assert_equal(Point3(2, -2, 0), result.at(3), 1e-8)); +} + +TEST(TranslationRecovery, ThreePosesWithZeroTranslationBATA) { + Values poses; + poses.insert(0, Pose3(Rot3::RzRyRx(-M_PI / 6, 0, 0), Point3(0, 0, 0))); + poses.insert(1, Pose3(Rot3(), Point3(0, 0, 0))); + poses.insert(2, Pose3(Rot3::RzRyRx(M_PI / 6, 0, 0), Point3(0, 0, 0))); + + auto relativeTranslations = TranslationRecovery::SimulateMeasurements( + poses, {{0, 1}, {1, 2}, {2, 0}}); + + // Check simulated measurements. 
+ for (auto& unitTranslation : relativeTranslations) { + EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation), + unitTranslation.measured())); + } + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + // Run translation recovery + const auto result = algorithm.run(relativeTranslations, /*scale=*/4.0); + + // Check result + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0), 1e-8)); + EXPECT(assert_equal(Point3(0, 0, 0), result.at(1), 1e-8)); + EXPECT(assert_equal(Point3(0, 0, 0), result.at(2), 1e-8)); +} + +TEST(TranslationRecovery, ThreePosesWithOneSoftConstraintBATA) { + // Create a dataset with 3 poses. + // __ __ + // \/ \/ + // 0 _____ 1 + // \ __ / + // \\// + // 3 + // + // 0 and 1 face in the same direction but have a translation offset. 3 is in + // the same direction as 0 and 1, in between 0 and 1, with some Y axis offset. + + Values poses; + poses.insert(0, Pose3(Rot3(), Point3(0, 0, 0))); + poses.insert(1, Pose3(Rot3(), Point3(2, 0, 0))); + poses.insert(3, Pose3(Rot3(), Point3(1, -1, 0))); + + auto relativeTranslations = TranslationRecovery::SimulateMeasurements( + poses, {{0, 1}, {0, 3}, {1, 3}}); + + std::vector> betweenTranslations; + betweenTranslations.emplace_back(0, 3, Point3(1, -1, 0), + noiseModel::Isotropic::Sigma(3, 1e-2)); + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + auto result = + algorithm.run(relativeTranslations, /*scale=*/0.0, betweenTranslations); + + // Check result + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0), 1e-4)); + EXPECT(assert_equal(Point3(2, 0, 0), result.at(1), 1e-4)); + EXPECT(assert_equal(Point3(1, -1, 0), result.at(3), 1e-4)); +} + +TEST(TranslationRecovery, ThreePosesWithOneHardConstraintBATA) { + // Create a dataset with 3 poses. + // __ __ + // \/ \/ + // 0 _____ 1 + // \ __ / + // \\// + // 3 + // + // 0 and 1 face in the same direction but have a translation offset. 
3 is in + // the same direction as 0 and 1, in between 0 and 1, with some Y axis offset. + + Values poses; + poses.insert(0, Pose3(Rot3(), Point3(0, 0, 0))); + poses.insert(1, Pose3(Rot3(), Point3(2, 0, 0))); + poses.insert(3, Pose3(Rot3(), Point3(1, -1, 0))); + + auto relativeTranslations = TranslationRecovery::SimulateMeasurements( + poses, {{0, 1}, {0, 3}, {1, 3}}); + + std::vector> betweenTranslations; + betweenTranslations.emplace_back(0, 1, Point3(2, 0, 0), + noiseModel::Constrained::All(3, 1e2)); + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + auto result = + algorithm.run(relativeTranslations, /*scale=*/0.0, betweenTranslations); + + // Check result + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0), 1e-4)); + EXPECT(assert_equal(Point3(2, 0, 0), result.at(1), 1e-4)); + EXPECT(assert_equal(Point3(1, -1, 0), result.at(3), 1e-4)); +} + +TEST(TranslationRecovery, NodeWithBetweenFactorAndNoMeasurementsBATA) { + // Checks that valid results are obtained when a between translation edge is + // provided with a node that does not have any other relative translations. + Values poses; + poses.insert(0, Pose3(Rot3(), Point3(0, 0, 0))); + poses.insert(1, Pose3(Rot3(), Point3(2, 0, 0))); + poses.insert(3, Pose3(Rot3(), Point3(1, -1, 0))); + poses.insert(4, Pose3(Rot3(), Point3(1, 2, 1))); + + auto relativeTranslations = TranslationRecovery::SimulateMeasurements( + poses, {{0, 1}, {0, 3}, {1, 3}}); + + std::vector> betweenTranslations; + betweenTranslations.emplace_back(0, 1, Point3(2, 0, 0), + noiseModel::Constrained::All(3, 1e2)); + // Node 4 only has this between translation prior, no relative translations. 
+ betweenTranslations.emplace_back(0, 4, Point3(1, 2, 1)); + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + auto result = + algorithm.run(relativeTranslations, /*scale=*/0.0, betweenTranslations); + + // Check result + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0), 1e-4)); + EXPECT(assert_equal(Point3(2, 0, 0), result.at(1), 1e-4)); + EXPECT(assert_equal(Point3(1, -1, 0), result.at(3), 1e-4)); + EXPECT(assert_equal(Point3(1, 2, 1), result.at(4), 1e-4)); +} + + + /* ************************************************************************* */ int main() { TestResult tr; From 30c789dbdb54bacc8ded04b557416a7911b01229 Mon Sep 17 00:00:00 2001 From: Fan Jiang Date: Mon, 2 Sep 2024 23:21:26 -0400 Subject: [PATCH 192/398] Use pybind11-stubgen for generating stubs --- python/CMakeLists.txt | 53 ++++++++++++++++++++++++++----------- python/dev_requirements.txt | 2 +- 2 files changed, 38 insertions(+), 17 deletions(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 43f8d78bd..404f5ecc0 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -171,6 +171,7 @@ file(COPY "${GTSAM_SOURCE_DIR}/examples/Data" DESTINATION "${GTSAM_MODULE_PATH}" # Add gtsam as a dependency to the install target set(GTSAM_PYTHON_DEPENDENCIES ${GTSAM_PYTHON_TARGET}) +set(GTSAM_PYTHON_INSTALL_EXTRA "") if(GTSAM_UNSTABLE_BUILD_PYTHON) set(ignore @@ -250,6 +251,22 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) VERBATIM ) endif() + + add_custom_target( + python-unstable-stubs + COMMAND + ${CMAKE_COMMAND} -E env + "PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}" + pybind11-stubgen -o . 
--enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" gtsam_unstable + DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_UNSTABLE_TARGET} + WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/" + ) + + if(NOT WIN32) + # Add the stubgen target as a dependency to the install target + list(APPEND GTSAM_PYTHON_INSTALL_EXTRA python-unstable-stubs) + endif() + # Custom make command to run all GTSAM_UNSTABLE Python tests add_custom_target( python-test-unstable @@ -262,26 +279,30 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) ) endif() +add_custom_target( + python-stubs + COMMAND + ${CMAKE_COMMAND} -E env + "PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}" + pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" gtsam + DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_TARGET} + WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/" +) + +if(NOT WIN32) + # Add the stubgen target as a dependency to the install target + list(APPEND GTSAM_PYTHON_INSTALL_EXTRA python-stubs) +endif() + # Add custom target so we can install with `make python-install` # Note below we make sure to install with --user iff not in a virtualenv set(GTSAM_PYTHON_INSTALL_TARGET python-install) -#TODO(Varun) Maybe move the long command to script? 
-# https://stackoverflow.com/questions/49053544/how-do-i-run-a-python-script-every-time-in-a-cmake-build -if (NOT WIN32) # WIN32=1 is target platform is Windows - add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} - COMMAND stubgen -q -p gtsam && cp -a out/gtsam/ gtsam && ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])" - DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} - WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY} - VERBATIM) -else() - #TODO(Varun) Find equivalent cp on Windows - add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} - COMMAND ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])" - DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} - WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY} - VERBATIM) -endif() +add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} + COMMAND ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])" + DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_INSTALL_EXTRA} + WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY} + VERBATIM) # Custom make command to run all GTSAM Python tests add_custom_target( diff --git a/python/dev_requirements.txt b/python/dev_requirements.txt index 6a5e7f924..94ab40189 100644 --- a/python/dev_requirements.txt +++ b/python/dev_requirements.txt @@ -1,3 +1,3 @@ -r 
requirements.txt pyparsing>=2.4.2 -mypy==1.4.1 #TODO(Varun) A bug in mypy>=1.5.0 causes an unresolved placeholder error when importing numpy>=2.0.0 (https://github.com/python/mypy/issues/17396) \ No newline at end of file +pybind11-stubgen>=2.5.1 \ No newline at end of file From e3dd4e1704bc3a2cdcd3856adc151f38bdbd7407 Mon Sep 17 00:00:00 2001 From: Fan Jiang Date: Thu, 5 Sep 2024 19:39:19 -0400 Subject: [PATCH 193/398] Fix compile error on Clang 18, tweak for better type generation --- gtsam/inference/Factor.h | 1 + python/CMakeLists.txt | 4 ++-- python/gtsam/gtsam.tpl | 6 +++--- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/gtsam/inference/Factor.h b/gtsam/inference/Factor.h index e357a9c88..f073c4975 100644 --- a/gtsam/inference/Factor.h +++ b/gtsam/inference/Factor.h @@ -25,6 +25,7 @@ #include #endif #include +#include #include #include diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 404f5ecc0..674f17d3f 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -257,7 +257,7 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}" - pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" gtsam_unstable + pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" --numpy-array-use-type-var gtsam_unstable DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_UNSTABLE_TARGET} WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/" ) @@ -284,7 +284,7 @@ add_custom_target( COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}" - pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" gtsam + pybind11-stubgen -o . 
--enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" --numpy-array-use-type-var gtsam DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_TARGET} WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/" ) diff --git a/python/gtsam/gtsam.tpl b/python/gtsam/gtsam.tpl index c72a216a2..6876b4ab4 100644 --- a/python/gtsam/gtsam.tpl +++ b/python/gtsam/gtsam.tpl @@ -39,12 +39,12 @@ namespace py = pybind11; {module_def} {{ m_.doc() = "pybind11 wrapper of {module_name}"; +// Specializations for STL classes +#include "python/gtsam/specializations/{module_name}.h" + {submodules_init} {wrapped_namespace} -// Specializations for STL classes -#include "python/gtsam/specializations/{module_name}.h" - }} From 9021c888ef88e09c20bd8bb22bdcf904887cf779 Mon Sep 17 00:00:00 2001 From: Akshay Krishnan Date: Sun, 8 Sep 2024 14:55:10 -0400 Subject: [PATCH 194/398] add the bata translation factor, with unit tests --- gtsam/sfm/TranslationFactor.h | 76 ++++- gtsam/sfm/TranslationRecovery.cpp | 18 +- gtsam/sfm/TranslationRecovery.h | 8 +- gtsam/sfm/sfm.i | 7 +- gtsam/sfm/tests/testTranslationFactor.cpp | 75 ++++- tests/testTranslationRecovery.cpp | 334 ++++++++++++++++++++++ 6 files changed, 506 insertions(+), 12 deletions(-) diff --git a/gtsam/sfm/TranslationFactor.h b/gtsam/sfm/TranslationFactor.h index a7289f759..f11d6a0d6 100644 --- a/gtsam/sfm/TranslationFactor.h +++ b/gtsam/sfm/TranslationFactor.h @@ -18,6 +18,7 @@ * @brief Binary factor for a relative translation direction measurement. 
*/ +#include #include #include #include @@ -36,8 +37,6 @@ namespace gtsam { * normalized(Tb - Ta) - w_aZb.point3() * * @ingroup sfm - * - * */ class TranslationFactor : public NoiseModelFactorN { private: @@ -60,7 +59,7 @@ class TranslationFactor : public NoiseModelFactorN { * @brief Caclulate error: (norm(Tb - Ta) - measurement) * where Tb and Ta are Point3 translations and measurement is * the Unit3 translation direction from a to b. - * + * * @param Ta translation for key a * @param Tb translation for key b * @param H1 optional jacobian in Ta @@ -73,7 +72,8 @@ class TranslationFactor : public NoiseModelFactorN { OptionalMatrixType H2) const override { const Point3 dir = Tb - Ta; Matrix33 H_predicted_dir; - const Point3 predicted = normalize(dir, H1 || H2 ? &H_predicted_dir : nullptr); + const Point3 predicted = + normalize(dir, H1 || H2 ? &H_predicted_dir : nullptr); if (H1) *H1 = -H_predicted_dir; if (H2) *H2 = H_predicted_dir; return predicted - measured_w_aZb_; @@ -89,4 +89,72 @@ class TranslationFactor : public NoiseModelFactorN { } #endif }; // \ TranslationFactor + +/** + * Binary factor for a relative translation direction measurement + * w_aZb. The measurement equation is + * w_aZb = scale * (Tb - Ta) + * i.e., w_aZb is the translation direction from frame A to B, in world + * coordinates. + * + * Instead of normalizing the Tb - Ta vector, we multiply it by a scalar + * which is also jointly optimized. This is inspired by the work on + * BATA (Zhuang et al, CVPR 2018). 
+ * + * @ingroup sfm + */ +class BilinearAngleTranslationFactor + : public NoiseModelFactorN { + private: + typedef NoiseModelFactorN Base; + Point3 measured_w_aZb_; + + public: + /// default constructor + BilinearAngleTranslationFactor() {} + + BilinearAngleTranslationFactor(Key a, Key b, Key scale_key, const Unit3& w_aZb, + const SharedNoiseModel& noiseModel) + : Base(noiseModel, a, b, scale_key), + measured_w_aZb_(w_aZb.point3()) {} + + /** + * @brief Caclulate error: (scale * (Tb - Ta) - measurement) + * where Tb and Ta are Point3 translations and measurement is + * the Unit3 translation direction from a to b. + * + * @param Ta translation for key a + * @param Tb translation for key b + * @param H1 optional jacobian in Ta + * @param H2 optional jacobian in Tb + * @return * Vector + */ + Vector evaluateError( + const Point3& Ta, const Point3& Tb, const Vector1& scale, + boost::optional H1 = boost::none, + boost::optional H2 = boost::none, + boost::optional H3 = boost::none) const override { + // Ideally we should use a positive real valued scalar datatype for scale. + const double abs_scale = abs(scale[0]); + const Point3 predicted = (Tb - Ta) * abs_scale; + if (H1) { + *H1 = -Matrix3::Identity() * abs_scale; + } + if (H2) { + *H2 = Matrix3::Identity() * abs_scale; + } + if (H3) { + *H3 = scale[0] >= 0 ? 
(Tb - Ta) : (Ta - Tb); + } + return predicted - measured_w_aZb_; + } + + private: + friend class boost::serialization::access; + template + void serialize(ARCHIVE& ar, const unsigned int /*version*/) { + ar& boost::serialization::make_nvp( + "Base", boost::serialization::base_object(*this)); + } +}; // \ BilinearAngleTranslationFactor } // namespace gtsam diff --git a/gtsam/sfm/TranslationRecovery.cpp b/gtsam/sfm/TranslationRecovery.cpp index c7ef2e526..4c7bb24da 100644 --- a/gtsam/sfm/TranslationRecovery.cpp +++ b/gtsam/sfm/TranslationRecovery.cpp @@ -101,9 +101,17 @@ NonlinearFactorGraph TranslationRecovery::buildGraph( NonlinearFactorGraph graph; // Add translation factors for input translation directions. + uint i = 0; for (auto edge : relativeTranslations) { - graph.emplace_shared(edge.key1(), edge.key2(), - edge.measured(), edge.noiseModel()); + if (use_bilinear_translation_factor_) { + graph.emplace_shared( + edge.key1(), edge.key2(), Symbol('S', i), edge.measured(), + edge.noiseModel()); + } else { + graph.emplace_shared( + edge.key1(), edge.key2(), edge.measured(), edge.noiseModel()); + } + i++; } return graph; } @@ -163,6 +171,12 @@ Values TranslationRecovery::initializeRandomly( insert(edge.key1()); insert(edge.key2()); } + + if (use_bilinear_translation_factor_) { + for (uint i = 0; i < relativeTranslations.size(); i++) { + initial.insert(Symbol('S', i), Vector1(1.0)); + } + } return initial; } diff --git a/gtsam/sfm/TranslationRecovery.h b/gtsam/sfm/TranslationRecovery.h index 4848d7cfa..0765077c0 100644 --- a/gtsam/sfm/TranslationRecovery.h +++ b/gtsam/sfm/TranslationRecovery.h @@ -60,14 +60,18 @@ class GTSAM_EXPORT TranslationRecovery { // Parameters. LevenbergMarquardtParams lmParams_; + const bool use_bilinear_translation_factor_ = false; + public: /** * @brief Construct a new Translation Recovery object * * @param lmParams parameters for optimization. 
*/ - TranslationRecovery(const LevenbergMarquardtParams &lmParams) - : lmParams_(lmParams) {} + TranslationRecovery(const LevenbergMarquardtParams &lmParams, + bool use_bilinear_translation_factor) + : lmParams_(lmParams), + use_bilinear_translation_factor_(use_bilinear_translation_factor) {} /** * @brief Default constructor. diff --git a/gtsam/sfm/sfm.i b/gtsam/sfm/sfm.i index ba25cf793..32b087b2d 100644 --- a/gtsam/sfm/sfm.i +++ b/gtsam/sfm/sfm.i @@ -23,7 +23,7 @@ virtual class SfmTrack : gtsam::SfmTrack2d { SfmTrack(); SfmTrack(const gtsam::Point3& pt); const Point3& point3() const; - + Point3 p; double r; @@ -37,8 +37,8 @@ virtual class SfmTrack : gtsam::SfmTrack2d { bool equals(const gtsam::SfmTrack& expected, double tol) const; }; -#include #include +#include #include class SfmData { SfmData(); @@ -268,7 +268,8 @@ class MFAS { #include class TranslationRecovery { - TranslationRecovery(const gtsam::LevenbergMarquardtParams& lmParams); + TranslationRecovery(const gtsam::LevenbergMarquardtParams& lmParams, + const bool use_bilinear_translation_factor); TranslationRecovery(); // default params. void addPrior(const gtsam::BinaryMeasurementsUnit3& relativeTranslations, const double scale, diff --git a/gtsam/sfm/tests/testTranslationFactor.cpp b/gtsam/sfm/tests/testTranslationFactor.cpp index 818f2bce5..b25745692 100644 --- a/gtsam/sfm/tests/testTranslationFactor.cpp +++ b/gtsam/sfm/tests/testTranslationFactor.cpp @@ -30,7 +30,7 @@ using namespace gtsam; static SharedNoiseModel model(noiseModel::Isotropic::Sigma(3, 0.05)); // Keys are deliberately *not* in sorted order to test that case. 
-static const Key kKey1(2), kKey2(1); +static const Key kKey1(2), kKey2(1), kEdgeKey(3); static const Unit3 kMeasured(1, 0, 0); /* ************************************************************************* */ @@ -99,6 +99,79 @@ TEST(TranslationFactor, Jacobian) { EXPECT(assert_equal(H2Expected, H2Actual, 1e-9)); } + +/* ************************************************************************* */ +TEST(BilinearAngleTranslationFactor, Constructor) { + BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model); +} + +/* ************************************************************************* */ +TEST(BilinearAngleTranslationFactor, ZeroError) { + // Create a factor + BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model); + + // Set the linearization + Point3 T1(1, 0, 0), T2(2, 0, 0); + Vector1 scale(1.0); + + // Use the factor to calculate the error + Vector actualError(factor.evaluateError(T1, T2, scale)); + + // Verify we get the expected error + Vector expected = (Vector3() << 0, 0, 0).finished(); + EXPECT(assert_equal(expected, actualError, 1e-9)); +} + +/* ************************************************************************* */ +TEST(BilinearAngleTranslationFactor, NonZeroError) { + // create a factor + BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model); + + // set the linearization + Point3 T1(0, 1, 1), T2(0, 2, 2); + Vector1 scale(1.0 / sqrt(2)); + + // use the factor to calculate the error + Vector actualError(factor.evaluateError(T1, T2, scale)); + + // verify we get the expected error + Vector expected = (Vector3() << -1, 1 / sqrt(2), 1 / sqrt(2)).finished(); + EXPECT(assert_equal(expected, actualError, 1e-9)); +} + +/* ************************************************************************* */ +Vector bilinearAngleFactorError(const Point3 &T1, const Point3 &T2, const Vector1 &scale, + const BilinearAngleTranslationFactor &factor) { + return factor.evaluateError(T1, T2, scale); 
+} + +TEST(BilinearAngleTranslationFactor, Jacobian) { + // Create a factor + BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model); + + // Set the linearization + Point3 T1(1, 0, 0), T2(2, 0, 0); + Vector1 scale(1.0); + + // Use the factor to calculate the Jacobians + Matrix H1Actual, H2Actual, H3Actual; + factor.evaluateError(T1, T2, scale, H1Actual, H2Actual, H3Actual); + + // Use numerical derivatives to calculate the Jacobians + Matrix H1Expected, H2Expected, H3Expected; + H1Expected = numericalDerivative11( + std::bind(&bilinearAngleFactorError, std::placeholders::_1, T2, scale, factor), T1); + H2Expected = numericalDerivative11( + std::bind(&bilinearAngleFactorError, T1, std::placeholders::_1, scale, factor), T2); + H3Expected = numericalDerivative11( + std::bind(&bilinearAngleFactorError, T1, T2, std::placeholders::_1, factor), scale); + + // Verify the Jacobians are correct + EXPECT(assert_equal(H1Expected, H1Actual, 1e-9)); + EXPECT(assert_equal(H2Expected, H2Actual, 1e-9)); + EXPECT(assert_equal(H3Expected, H3Actual, 1e-9)); +} + /* ************************************************************************* */ int main() { TestResult tr; diff --git a/tests/testTranslationRecovery.cpp b/tests/testTranslationRecovery.cpp index 15f1caa1b..6e9c3ed24 100644 --- a/tests/testTranslationRecovery.cpp +++ b/tests/testTranslationRecovery.cpp @@ -353,6 +353,340 @@ TEST(TranslationRecovery, NodeWithBetweenFactorAndNoMeasurements) { EXPECT(assert_equal(Point3(1, 2, 1), result.at(4), 1e-4)); } +/* ************************************************************************* +* Repeat all tests, but with the Bilinear Angle Translation Factor. +* ************************************************************************* */ + + +/* ************************************************************************* */ +// We read the BAL file, which has 3 cameras in it, with poses. 
We then assume +// the rotations are correct, but translations have to be estimated from +// translation directions only. Since we have 3 cameras, A, B, and C, we can at +// most create three relative measurements, let's call them w_aZb, w_aZc, and +// bZc. These will be of type Unit3. We then call `recoverTranslations` which +// sets up an optimization problem for the three unknown translations. +TEST(TranslationRecovery, BALBATA) { + const string filename = findExampleDataFile("dubrovnik-3-7-pre"); + SfmData db = SfmData::FromBalFile(filename); + + // Get camera poses, as Values + size_t j = 0; + Values poses; + for (auto camera : db.cameras) { + poses.insert(j++, camera.pose()); + } + + // Simulate measurements + const auto relativeTranslations = TranslationRecovery::SimulateMeasurements( + poses, {{0, 1}, {0, 2}, {1, 2}}); + + // Check simulated measurements. + for (auto& unitTranslation : relativeTranslations) { + EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation), + unitTranslation.measured())); + } + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + const auto graph = algorithm.buildGraph(relativeTranslations); + EXPECT_LONGS_EQUAL(3, graph.size()); + + // Run translation recovery + const double scale = 2.0; + const auto result = algorithm.run(relativeTranslations, scale); + + // Check result for first two translations, determined by prior + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0))); + EXPECT(assert_equal( + Point3(2 * GetDirectionFromPoses(poses, relativeTranslations[0])), + result.at(1))); + + // Check that the third translations is correct + Point3 Ta = poses.at(0).translation(); + Point3 Tb = poses.at(1).translation(); + Point3 Tc = poses.at(2).translation(); + Point3 expected = (Tc - Ta) * (scale / (Tb - Ta).norm()); + EXPECT(assert_equal(expected, result.at(2), 1e-4)); + + // TODO(frank): how to get stats back? 
+ // EXPECT_DOUBLES_EQUAL(0.0199833, actualError, 1e-5); +} + +TEST(TranslationRecovery, TwoPoseTestBATA) { + // Create a dataset with 2 poses. + // __ __ + // \/ \/ + // 0 _____ 1 + // + // 0 and 1 face in the same direction but have a translation offset. + Values poses; + poses.insert(0, Pose3(Rot3(), Point3(0, 0, 0))); + poses.insert(1, Pose3(Rot3(), Point3(2, 0, 0))); + + auto relativeTranslations = + TranslationRecovery::SimulateMeasurements(poses, {{0, 1}}); + + // Check simulated measurements. + for (auto& unitTranslation : relativeTranslations) { + EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation), + unitTranslation.measured())); + } + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + const auto graph = algorithm.buildGraph(relativeTranslations); + EXPECT_LONGS_EQUAL(1, graph.size()); + + // Run translation recovery + const auto result = algorithm.run(relativeTranslations, /*scale=*/3.0); + + // Check result for first two translations, determined by prior + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0), 1e-8)); + EXPECT(assert_equal(Point3(3, 0, 0), result.at(1), 1e-8)); +} + +TEST(TranslationRecovery, ThreePoseTestBATA) { + // Create a dataset with 3 poses. + // __ __ + // \/ \/ + // 0 _____ 1 + // \ __ / + // \\// + // 3 + // + // 0 and 1 face in the same direction but have a translation offset. 3 is in + // the same direction as 0 and 1, in between 0 and 1, with some Y axis offset. + + Values poses; + poses.insert(0, Pose3(Rot3(), Point3(0, 0, 0))); + poses.insert(1, Pose3(Rot3(), Point3(2, 0, 0))); + poses.insert(3, Pose3(Rot3(), Point3(1, -1, 0))); + + auto relativeTranslations = TranslationRecovery::SimulateMeasurements( + poses, {{0, 1}, {1, 3}, {3, 0}}); + + // Check simulated measurements. 
+ for (auto& unitTranslation : relativeTranslations) { + EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation), + unitTranslation.measured())); + } + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + const auto graph = algorithm.buildGraph(relativeTranslations); + EXPECT_LONGS_EQUAL(3, graph.size()); + + const auto result = algorithm.run(relativeTranslations, /*scale=*/3.0); + + // Check result + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0), 1e-8)); + EXPECT(assert_equal(Point3(3, 0, 0), result.at(1), 1e-8)); + EXPECT(assert_equal(Point3(1.5, -1.5, 0), result.at(3), 1e-8)); +} + +TEST(TranslationRecovery, ThreePosesIncludingZeroTranslationBATA) { + // Create a dataset with 3 poses. + // __ __ + // \/ \/ + // 0 _____ 1 + // 2 <| + // + // 0 and 1 face in the same direction but have a translation offset. 2 is at + // the same point as 1 but is rotated, with little FOV overlap. + Values poses; + poses.insert(0, Pose3(Rot3(), Point3(0, 0, 0))); + poses.insert(1, Pose3(Rot3(), Point3(2, 0, 0))); + poses.insert(2, Pose3(Rot3::RzRyRx(-M_PI / 2, 0, 0), Point3(2, 0, 0))); + + auto relativeTranslations = + TranslationRecovery::SimulateMeasurements(poses, {{0, 1}, {1, 2}}); + + // Check simulated measurements. + for (auto& unitTranslation : relativeTranslations) { + EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation), + unitTranslation.measured())); + } + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + // Run translation recovery + const auto result = algorithm.run(relativeTranslations, /*scale=*/3.0); + + // Check result + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0), 1e-8)); + EXPECT(assert_equal(Point3(3, 0, 0), result.at(1), 1e-8)); + EXPECT(assert_equal(Point3(3, 0, 0), result.at(2), 1e-8)); +} + +TEST(TranslationRecovery, FourPosesIncludingZeroTranslationBATA) { + // Create a dataset with 4 poses. 
+ // __ __ + // \/ \/ + // 0 _____ 1 + // \ __ 2 <| + // \\// + // 3 + // + // 0 and 1 face in the same direction but have a translation offset. 2 is at + // the same point as 1 but is rotated, with very little FOV overlap. 3 is in + // the same direction as 0 and 1, in between 0 and 1, with some Y axis offset. + + Values poses; + poses.insert(0, Pose3(Rot3(), Point3(0, 0, 0))); + poses.insert(1, Pose3(Rot3(), Point3(2, 0, 0))); + poses.insert(2, Pose3(Rot3::RzRyRx(-M_PI / 2, 0, 0), Point3(2, 0, 0))); + poses.insert(3, Pose3(Rot3(), Point3(1, -1, 0))); + + auto relativeTranslations = TranslationRecovery::SimulateMeasurements( + poses, {{0, 1}, {1, 2}, {1, 3}, {3, 0}}); + + // Check simulated measurements. + for (auto& unitTranslation : relativeTranslations) { + EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation), + unitTranslation.measured())); + } + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + + // Run translation recovery + const auto result = algorithm.run(relativeTranslations, /*scale=*/4.0); + + // Check result + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0), 1e-8)); + EXPECT(assert_equal(Point3(4, 0, 0), result.at(1), 1e-8)); + EXPECT(assert_equal(Point3(4, 0, 0), result.at(2), 1e-8)); + EXPECT(assert_equal(Point3(2, -2, 0), result.at(3), 1e-8)); +} + +TEST(TranslationRecovery, ThreePosesWithZeroTranslationBATA) { + Values poses; + poses.insert(0, Pose3(Rot3::RzRyRx(-M_PI / 6, 0, 0), Point3(0, 0, 0))); + poses.insert(1, Pose3(Rot3(), Point3(0, 0, 0))); + poses.insert(2, Pose3(Rot3::RzRyRx(M_PI / 6, 0, 0), Point3(0, 0, 0))); + + auto relativeTranslations = TranslationRecovery::SimulateMeasurements( + poses, {{0, 1}, {1, 2}, {2, 0}}); + + // Check simulated measurements. 
+ for (auto& unitTranslation : relativeTranslations) { + EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation), + unitTranslation.measured())); + } + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + // Run translation recovery + const auto result = algorithm.run(relativeTranslations, /*scale=*/4.0); + + // Check result + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0), 1e-8)); + EXPECT(assert_equal(Point3(0, 0, 0), result.at(1), 1e-8)); + EXPECT(assert_equal(Point3(0, 0, 0), result.at(2), 1e-8)); +} + +TEST(TranslationRecovery, ThreePosesWithOneSoftConstraintBATA) { + // Create a dataset with 3 poses. + // __ __ + // \/ \/ + // 0 _____ 1 + // \ __ / + // \\// + // 3 + // + // 0 and 1 face in the same direction but have a translation offset. 3 is in + // the same direction as 0 and 1, in between 0 and 1, with some Y axis offset. + + Values poses; + poses.insert(0, Pose3(Rot3(), Point3(0, 0, 0))); + poses.insert(1, Pose3(Rot3(), Point3(2, 0, 0))); + poses.insert(3, Pose3(Rot3(), Point3(1, -1, 0))); + + auto relativeTranslations = TranslationRecovery::SimulateMeasurements( + poses, {{0, 1}, {0, 3}, {1, 3}}); + + std::vector> betweenTranslations; + betweenTranslations.emplace_back(0, 3, Point3(1, -1, 0), + noiseModel::Isotropic::Sigma(3, 1e-2)); + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + auto result = + algorithm.run(relativeTranslations, /*scale=*/0.0, betweenTranslations); + + // Check result + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0), 1e-4)); + EXPECT(assert_equal(Point3(2, 0, 0), result.at(1), 1e-4)); + EXPECT(assert_equal(Point3(1, -1, 0), result.at(3), 1e-4)); +} + +TEST(TranslationRecovery, ThreePosesWithOneHardConstraintBATA) { + // Create a dataset with 3 poses. + // __ __ + // \/ \/ + // 0 _____ 1 + // \ __ / + // \\// + // 3 + // + // 0 and 1 face in the same direction but have a translation offset. 
3 is in + // the same direction as 0 and 1, in between 0 and 1, with some Y axis offset. + + Values poses; + poses.insert(0, Pose3(Rot3(), Point3(0, 0, 0))); + poses.insert(1, Pose3(Rot3(), Point3(2, 0, 0))); + poses.insert(3, Pose3(Rot3(), Point3(1, -1, 0))); + + auto relativeTranslations = TranslationRecovery::SimulateMeasurements( + poses, {{0, 1}, {0, 3}, {1, 3}}); + + std::vector> betweenTranslations; + betweenTranslations.emplace_back(0, 1, Point3(2, 0, 0), + noiseModel::Constrained::All(3, 1e2)); + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + auto result = + algorithm.run(relativeTranslations, /*scale=*/0.0, betweenTranslations); + + // Check result + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0), 1e-4)); + EXPECT(assert_equal(Point3(2, 0, 0), result.at(1), 1e-4)); + EXPECT(assert_equal(Point3(1, -1, 0), result.at(3), 1e-4)); +} + +TEST(TranslationRecovery, NodeWithBetweenFactorAndNoMeasurementsBATA) { + // Checks that valid results are obtained when a between translation edge is + // provided with a node that does not have any other relative translations. + Values poses; + poses.insert(0, Pose3(Rot3(), Point3(0, 0, 0))); + poses.insert(1, Pose3(Rot3(), Point3(2, 0, 0))); + poses.insert(3, Pose3(Rot3(), Point3(1, -1, 0))); + poses.insert(4, Pose3(Rot3(), Point3(1, 2, 1))); + + auto relativeTranslations = TranslationRecovery::SimulateMeasurements( + poses, {{0, 1}, {0, 3}, {1, 3}}); + + std::vector> betweenTranslations; + betweenTranslations.emplace_back(0, 1, Point3(2, 0, 0), + noiseModel::Constrained::All(3, 1e2)); + // Node 4 only has this between translation prior, no relative translations. 
+ betweenTranslations.emplace_back(0, 4, Point3(1, 2, 1)); + + LevenbergMarquardtParams params; + TranslationRecovery algorithm(params, true); + auto result = + algorithm.run(relativeTranslations, /*scale=*/0.0, betweenTranslations); + + // Check result + EXPECT(assert_equal(Point3(0, 0, 0), result.at(0), 1e-4)); + EXPECT(assert_equal(Point3(2, 0, 0), result.at(1), 1e-4)); + EXPECT(assert_equal(Point3(1, -1, 0), result.at(3), 1e-4)); + EXPECT(assert_equal(Point3(1, 2, 1), result.at(4), 1e-4)); +} + + + /* ************************************************************************* */ int main() { TestResult tr; From a6861392c9b564758937621ee341c1ac0aa3ea84 Mon Sep 17 00:00:00 2001 From: Akshay Krishnan Date: Sun, 8 Sep 2024 19:44:28 -0400 Subject: [PATCH 195/398] rebase and remove boost --- gtsam/sfm/TranslationFactor.h | 28 ++++++++++++---------------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/gtsam/sfm/TranslationFactor.h b/gtsam/sfm/TranslationFactor.h index f11d6a0d6..40227eef6 100644 --- a/gtsam/sfm/TranslationFactor.h +++ b/gtsam/sfm/TranslationFactor.h @@ -44,7 +44,6 @@ class TranslationFactor : public NoiseModelFactorN { Point3 measured_w_aZb_; public: - // Provide access to the Matrix& version of evaluateError: using NoiseModelFactor2::evaluateError; @@ -66,10 +65,9 @@ class TranslationFactor : public NoiseModelFactorN { * @param H2 optional jacobian in Tb * @return * Vector */ - Vector evaluateError( - const Point3& Ta, const Point3& Tb, - OptionalMatrixType H1, - OptionalMatrixType H2) const override { + Vector evaluateError(const Point3& Ta, const Point3& Tb, + OptionalMatrixType H1, + OptionalMatrixType H2) const override { const Point3 dir = Tb - Ta; Matrix33 H_predicted_dir; const Point3 predicted = @@ -95,11 +93,11 @@ class TranslationFactor : public NoiseModelFactorN { * w_aZb. The measurement equation is * w_aZb = scale * (Tb - Ta) * i.e., w_aZb is the translation direction from frame A to B, in world - * coordinates. 
- * + * coordinates. + * * Instead of normalizing the Tb - Ta vector, we multiply it by a scalar * which is also jointly optimized. This is inspired by the work on - * BATA (Zhuang et al, CVPR 2018). + * BATA (Zhuang et al, CVPR 2018). * * @ingroup sfm */ @@ -113,10 +111,10 @@ class BilinearAngleTranslationFactor /// default constructor BilinearAngleTranslationFactor() {} - BilinearAngleTranslationFactor(Key a, Key b, Key scale_key, const Unit3& w_aZb, + BilinearAngleTranslationFactor(Key a, Key b, Key scale_key, + const Unit3& w_aZb, const SharedNoiseModel& noiseModel) - : Base(noiseModel, a, b, scale_key), - measured_w_aZb_(w_aZb.point3()) {} + : Base(noiseModel, a, b, scale_key), measured_w_aZb_(w_aZb.point3()) {} /** * @brief Caclulate error: (scale * (Tb - Ta) - measurement) @@ -129,11 +127,9 @@ class BilinearAngleTranslationFactor * @param H2 optional jacobian in Tb * @return * Vector */ - Vector evaluateError( - const Point3& Ta, const Point3& Tb, const Vector1& scale, - boost::optional H1 = boost::none, - boost::optional H2 = boost::none, - boost::optional H3 = boost::none) const override { + Vector evaluateError(const Point3& Ta, const Point3& Tb, const Vector1& scale, + OptionalMatrixType H1, OptionalMatrixType H2, + OptionalMatrixType H3) const override { // Ideally we should use a positive real valued scalar datatype for scale. 
const double abs_scale = abs(scale[0]); const Point3 predicted = (Tb - Ta) * abs_scale; From 26a1ccabfc614f4fd82c4ed645c2dbedb9bef30b Mon Sep 17 00:00:00 2001 From: Akshay Krishnan Date: Sun, 8 Sep 2024 20:17:06 -0400 Subject: [PATCH 196/398] add no jacobian version of evaluateerror --- gtsam/sfm/TranslationFactor.h | 3 +++ 1 file changed, 3 insertions(+) diff --git a/gtsam/sfm/TranslationFactor.h b/gtsam/sfm/TranslationFactor.h index 40227eef6..9af3b184e 100644 --- a/gtsam/sfm/TranslationFactor.h +++ b/gtsam/sfm/TranslationFactor.h @@ -116,6 +116,9 @@ class BilinearAngleTranslationFactor const SharedNoiseModel& noiseModel) : Base(noiseModel, a, b, scale_key), measured_w_aZb_(w_aZb.point3()) {} + // Provide access to the Matrix& version of evaluateError: + using NoiseModelFactor2::evaluateError; + /** * @brief Caclulate error: (scale * (Tb - Ta) - measurement) * where Tb and Ta are Point3 translations and measurement is From 3319400625306c18ed4ca12dfb81164b7574de3b Mon Sep 17 00:00:00 2001 From: Akshay Krishnan Date: Sun, 8 Sep 2024 22:21:49 -0400 Subject: [PATCH 197/398] add the default option wrapper --- gtsam/sfm/sfm.i | 1 + 1 file changed, 1 insertion(+) diff --git a/gtsam/sfm/sfm.i b/gtsam/sfm/sfm.i index 32b087b2d..142e65d7e 100644 --- a/gtsam/sfm/sfm.i +++ b/gtsam/sfm/sfm.i @@ -270,6 +270,7 @@ class MFAS { class TranslationRecovery { TranslationRecovery(const gtsam::LevenbergMarquardtParams& lmParams, const bool use_bilinear_translation_factor); + TranslationRecovery(const gtsam::LevenbergMarquardtParams& lmParams); TranslationRecovery(); // default params. 
void addPrior(const gtsam::BinaryMeasurementsUnit3& relativeTranslations, const double scale, From 551643bf6e25ead5e2d50ed1a61b15d212b5cc90 Mon Sep 17 00:00:00 2001 From: Akshay Krishnan Date: Sun, 8 Sep 2024 22:22:04 -0400 Subject: [PATCH 198/398] minor uint change --- gtsam/sfm/TranslationRecovery.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/sfm/TranslationRecovery.cpp b/gtsam/sfm/TranslationRecovery.cpp index 4c7bb24da..02c78133e 100644 --- a/gtsam/sfm/TranslationRecovery.cpp +++ b/gtsam/sfm/TranslationRecovery.cpp @@ -101,7 +101,7 @@ NonlinearFactorGraph TranslationRecovery::buildGraph( NonlinearFactorGraph graph; // Add translation factors for input translation directions. - uint i = 0; + uint64_t i = 0; for (auto edge : relativeTranslations) { if (use_bilinear_translation_factor_) { graph.emplace_shared( @@ -173,7 +173,7 @@ Values TranslationRecovery::initializeRandomly( } if (use_bilinear_translation_factor_) { - for (uint i = 0; i < relativeTranslations.size(); i++) { + for (uint64_t i = 0; i < relativeTranslations.size(); i++) { initial.insert(Symbol('S', i), Vector1(1.0)); } } From 14d8870c6b6acfcd30536ecae907522a7e07f564 Mon Sep 17 00:00:00 2001 From: Akshay Krishnan Date: Mon, 9 Sep 2024 07:21:28 -0700 Subject: [PATCH 199/398] make boolean parameter optional --- gtsam/sfm/TranslationRecovery.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/sfm/TranslationRecovery.h b/gtsam/sfm/TranslationRecovery.h index 0765077c0..a91ef01f9 100644 --- a/gtsam/sfm/TranslationRecovery.h +++ b/gtsam/sfm/TranslationRecovery.h @@ -69,7 +69,7 @@ class GTSAM_EXPORT TranslationRecovery { * @param lmParams parameters for optimization. 
*/ TranslationRecovery(const LevenbergMarquardtParams &lmParams, - bool use_bilinear_translation_factor) + bool use_bilinear_translation_factor = false) : lmParams_(lmParams), use_bilinear_translation_factor_(use_bilinear_translation_factor) {} From 958a298fee629e1ad960db71c36da9eace09dc11 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 12 Sep 2024 15:37:30 -0700 Subject: [PATCH 200/398] Clean up small things --- gtsam/hybrid/tests/testGaussianMixture.cpp | 1 - .../tests/testGaussianMixtureFactor.cpp | 32 +++++++------------ 2 files changed, 11 insertions(+), 22 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index 4da61912e..7c241acb9 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -31,7 +31,6 @@ #include using namespace gtsam; -using noiseModel::Isotropic; using symbol_shorthand::M; using symbol_shorthand::X; using symbol_shorthand::Z; diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index fcd9dd08f..22426305e 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -35,8 +35,6 @@ using namespace std; using namespace gtsam; -using noiseModel::Isotropic; -using symbol_shorthand::F; using symbol_shorthand::M; using symbol_shorthand::X; using symbol_shorthand::Z; @@ -387,40 +385,38 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) { namespace test_two_state_estimation { +DiscreteKey m1(M(1), 2); + /// Create Two State Bayes Network with measurements static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, double sigma1, bool add_second_measurement = false, double prior_sigma = 1e-3, double measurement_sigma = 3.0) { - DiscreteKey m1(M(1), 2); - Key z0 = Z(0), z1 = Z(1); - Key x0 = X(0), x1 = X(1); - HybridBayesNet hbn; auto measurement_model = noiseModel::Isotropic::Sigma(1, 
measurement_sigma); // Add measurement P(z0 | x0) auto p_z0 = std::make_shared( - z0, Vector1(0.0), -I_1x1, x0, I_1x1, measurement_model); + Z(0), Vector1(0.0), -I_1x1, X(0), I_1x1, measurement_model); hbn.push_back(p_z0); // Add hybrid motion model auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); - auto c0 = make_shared(x1, Vector1(mu0), I_1x1, x0, + auto c0 = make_shared(X(1), Vector1(mu0), I_1x1, X(0), -I_1x1, model0), - c1 = make_shared(x1, Vector1(mu1), I_1x1, x0, + c1 = make_shared(X(1), Vector1(mu1), I_1x1, X(0), -I_1x1, model1); auto motion = std::make_shared( - KeyVector{x1}, KeyVector{x0}, DiscreteKeys{m1}, std::vector{c0, c1}); + KeyVector{X(1)}, KeyVector{X(0)}, DiscreteKeys{m1}, std::vector{c0, c1}); hbn.push_back(motion); if (add_second_measurement) { // Add second measurement auto p_z1 = std::make_shared( - z1, Vector1(0.0), -I_1x1, x1, I_1x1, measurement_model); + Z(1), Vector1(0.0), -I_1x1, X(1), I_1x1, measurement_model); hbn.push_back(p_z1); } @@ -452,14 +448,11 @@ TEST(GaussianMixtureFactor, TwoStateModel) { double mu0 = 1.0, mu1 = 3.0; double sigma = 2.0; - DiscreteKey m1(M(1), 2); - Key z0 = Z(0), z1 = Z(1); - // Start with no measurement on x1, only on x0 HybridBayesNet hbn = CreateBayesNet(mu0, mu1, sigma, sigma, false); VectorValues given; - given.insert(z0, Vector1(0.5)); + given.insert(Z(0), Vector1(0.5)); { HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); @@ -476,7 +469,7 @@ TEST(GaussianMixtureFactor, TwoStateModel) { // If we see z1=2.6 (> 2.5 which is the halfway point), // discrete mode should say m1=1 - given.insert(z1, Vector1(2.6)); + given.insert(Z(1), Vector1(2.6)); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); @@ -508,14 +501,11 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); - 
DiscreteKey m1(M(1), 2); - Key z0 = Z(0), z1 = Z(1); - // Start with no measurement on x1, only on x0 HybridBayesNet hbn = CreateBayesNet(mu0, mu1, sigma0, sigma1, false); VectorValues given; - given.insert(z0, Vector1(0.5)); + given.insert(Z(0), Vector1(0.5)); { // Start with no measurement on x1, only on x0 @@ -552,7 +542,7 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { // Now we add a measurement z1 on x1 hbn = CreateBayesNet(mu0, mu1, sigma0, sigma1, true); - given.insert(z1, Vector1(2.2)); + given.insert(Z(1), Vector1(2.2)); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); { From 2d58a5bac068b7ad10b22e34ac50377b2081e581 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 12 Sep 2024 18:10:42 -0700 Subject: [PATCH 201/398] Importance sampling --- .../tests/testGaussianMixtureFactor.cpp | 168 ++++++++++++------ 1 file changed, 117 insertions(+), 51 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 22426305e..81a68826a 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -18,7 +18,9 @@ * @date December 2021 */ +#include #include +#include #include #include #include @@ -33,6 +35,8 @@ // Include for test suite #include +#include + using namespace std; using namespace gtsam; using symbol_shorthand::M; @@ -387,46 +391,89 @@ namespace test_two_state_estimation { DiscreteKey m1(M(1), 2); -/// Create Two State Bayes Network with measurements -static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, - double sigma1, - bool add_second_measurement = false, - double prior_sigma = 1e-3, - double measurement_sigma = 3.0) { - HybridBayesNet hbn; - - auto measurement_model = noiseModel::Isotropic::Sigma(1, measurement_sigma); - // Add measurement P(z0 | x0) - auto p_z0 = std::make_shared( - Z(0), Vector1(0.0), -I_1x1, X(0), I_1x1, measurement_model); - hbn.push_back(p_z0); - - // Add hybrid motion 
model +/// Create hybrid motion model p(x1 | x0, m1) +static GaussianMixture::shared_ptr CreateHybridMotionModel(double mu0, + double mu1, + double sigma0, + double sigma1) { auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); auto c0 = make_shared(X(1), Vector1(mu0), I_1x1, X(0), -I_1x1, model0), c1 = make_shared(X(1), Vector1(mu1), I_1x1, X(0), -I_1x1, model1); - - auto motion = std::make_shared( + return std::make_shared( KeyVector{X(1)}, KeyVector{X(0)}, DiscreteKeys{m1}, std::vector{c0, c1}); - hbn.push_back(motion); +} +/// Create two state Bayes network with 1 or two measurement models +HybridBayesNet CreateBayesNet( + const GaussianMixture::shared_ptr& hybridMotionModel, + bool add_second_measurement = false) { + HybridBayesNet hbn; + + // Add measurement model p(z0 | x0) + const double measurement_sigma = 3.0; + auto measurement_model = noiseModel::Isotropic::Sigma(1, measurement_sigma); + hbn.emplace_shared(Z(0), Vector1(0.0), I_1x1, X(0), + -I_1x1, measurement_model); + + // Optionally add second measurement model p(z1 | x1) if (add_second_measurement) { - // Add second measurement - auto p_z1 = std::make_shared( - Z(1), Vector1(0.0), -I_1x1, X(1), I_1x1, measurement_model); - hbn.push_back(p_z1); + hbn.emplace_shared(Z(1), Vector1(0.0), I_1x1, X(1), + -I_1x1, measurement_model); } + // Add hybrid motion model + hbn.push_back(hybridMotionModel); + // Discrete uniform prior. 
- auto p_m1 = std::make_shared(m1, "0.5/0.5"); - hbn.push_back(p_m1); + hbn.emplace_shared(m1, "0.5/0.5"); return hbn; } +/// Create importance sampling network p(x1| x0, m1) p(x0) P(m1), +/// using Q(x0) = N(z0, sigma_Q) to sample from p(x0) +HybridBayesNet CreateProposalNet( + const GaussianMixture::shared_ptr& hybridMotionModel, double z0, + double sigma_Q) { + HybridBayesNet hbn; + + // Add hybrid motion model + hbn.push_back(hybridMotionModel); + + // Add proposal Q(x0) for x0 + auto measurement_model = noiseModel::Isotropic::Sigma(1, sigma_Q); + hbn.emplace_shared( + GaussianConditional::FromMeanAndStddev(X(0), Vector1(z0), sigma_Q)); + + // Discrete uniform prior. + hbn.emplace_shared(m1, "0.5/0.5"); + + return hbn; +} + +/// Approximate the discrete marginal P(m1) using importance sampling +/// Not typically called as expensive, but values are used in the tests. +void approximateDiscreteMarginal(const HybridBayesNet& hbn, + const HybridBayesNet& proposalNet, + const VectorValues& given) { + // Do importance sampling + double w0 = 0.0, w1 = 0.0; + std::mt19937_64 rng(44); + for (int i = 0; i < 50000; i++) { + HybridValues sample = proposalNet.sample(&rng); + sample.insert(given); + double weight = hbn.evaluate(sample) / proposalNet.evaluate(sample); + (sample.atDiscrete(m1.first) == 0) ? 
w0 += weight : w1 += weight; + } + double sumWeights = w0 + w1; + double pm1 = w1 / sumWeights; + std::cout << "p(m0) ~ " << 1.0 - pm1 << std::endl; + std::cout << "p(m1) ~ " << pm1 << std::endl; +} + } // namespace test_two_state_estimation /* ************************************************************************* */ @@ -446,37 +493,48 @@ TEST(GaussianMixtureFactor, TwoStateModel) { using namespace test_two_state_estimation; double mu0 = 1.0, mu1 = 3.0; - double sigma = 2.0; + double sigma = 0.5; + auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma, sigma); // Start with no measurement on x1, only on x0 - HybridBayesNet hbn = CreateBayesNet(mu0, mu1, sigma, sigma, false); + double z0 = 0.5; VectorValues given; - given.insert(Z(0), Vector1(0.5)); + given.insert(Z(0), Vector1(z0)); + + // Create proposal network for importance sampling + auto proposalNet = CreateProposalNet(hybridMotionModel, z0, 3.0); + EXPECT_LONGS_EQUAL(3, proposalNet.size()); { + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); // Since no measurement on x1, we hedge our bets + // Importance sampling run with 50k samples gives 0.49934/0.50066 + // approximateDiscreteMarginal(hbn, proposalNet, given); DiscreteConditional expected(m1, "0.5/0.5"); EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()))); } { // Now we add a measurement z1 on x1 - hbn = CreateBayesNet(mu0, mu1, sigma, sigma, true); + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); - // If we see z1=2.6 (> 2.5 which is the halfway point), + // If we see z1=4.5 (>> 2.5 which is the halfway point), // discrete mode should say m1=1 - given.insert(Z(1), Vector1(2.6)); + const double z1 = 4.5; + given.insert(Z(1), Vector1(z1)); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - // Since we have a measurement on z2, 
we get a definite result - DiscreteConditional expected(m1, "0.49772729/0.50227271"); - // regression - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); + // Since we have a measurement on x1, we get a definite result + // Values taken from an importance sampling run with 50k samples: + // approximateDiscreteMarginal(hbn, proposalNet, given); + DiscreteConditional expected(m1, "0.446629/0.553371"); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.01)); } } @@ -498,22 +556,24 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { double mu0 = 1.0, mu1 = 3.0; double sigma0 = 6.0, sigma1 = 4.0; - auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); - auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); + auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma0, sigma1); // Start with no measurement on x1, only on x0 - HybridBayesNet hbn = CreateBayesNet(mu0, mu1, sigma0, sigma1, false); - + double z0 = 0.5; VectorValues given; - given.insert(Z(0), Vector1(0.5)); + given.insert(Z(0), Vector1(z0)); + + // Create proposal network for importance sampling + // uncomment this and the approximateDiscreteMarginal calls to run + // auto proposalNet = CreateProposalNet(hybridMotionModel, z0, 3.0); { - // Start with no measurement on x1, only on x0 + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); { VectorValues vv{ - {X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}, {Z(0), Vector1(0.5)}}; + {X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}, {Z(0), Vector1(z0)}}; HybridValues hv0(vv, DiscreteValues{{M(1), 0}}), hv1(vv, DiscreteValues{{M(1), 1}}); EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), @@ -521,7 +581,7 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { } { VectorValues vv{ - {X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}, {Z(0), Vector1(0.5)}}; + {X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}, {Z(0), Vector1(z0)}}; HybridValues hv0(vv, DiscreteValues{{M(1), 0}}), hv1(vv, 
DiscreteValues{{M(1), 1}}); EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), @@ -530,6 +590,9 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + // Importance sampling run with 50k samples gives 0.49934/0.50066 + // approximateDiscreteMarginal(hbn, proposalNet, given); + // Since no measurement on x1, we a 50/50 probability auto p_m = bn->at(2)->asDiscrete(); EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()(DiscreteValues{{m1.first, 0}}), @@ -540,16 +603,18 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { { // Now we add a measurement z1 on x1 - hbn = CreateBayesNet(mu0, mu1, sigma0, sigma1, true); + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); + + double z1 = 2.2; + given.insert(Z(1), Vector1(z1)); - given.insert(Z(1), Vector1(2.2)); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); { VectorValues vv{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}, - {Z(0), Vector1(0.5)}, - {Z(1), Vector1(2.2)}}; + {Z(0), Vector1(z0)}, + {Z(1), Vector1(z1)}}; HybridValues hv0(vv, DiscreteValues{{M(1), 0}}), hv1(vv, DiscreteValues{{M(1), 1}}); EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), @@ -558,8 +623,8 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { { VectorValues vv{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}, - {Z(0), Vector1(0.5)}, - {Z(1), Vector1(2.2)}}; + {Z(0), Vector1(z0)}, + {Z(1), Vector1(z1)}}; HybridValues hv0(vv, DiscreteValues{{M(1), 0}}), hv1(vv, DiscreteValues{{M(1), 1}}); EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), @@ -569,9 +634,10 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); // Since we have a measurement on z2, we get a definite result - DiscreteConditional expected(m1, "0.44744586/0.55255414"); - // regression - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); + // Values taken from an importance sampling run with 50k samples: + // approximateDiscreteMarginal(hbn, proposalNet, given); + 
DiscreteConditional expected(m1, "0.446345/0.553655"); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.01)); } } From e59b3afc2931a6a2c6a6e9e2a277c870754f9c6a Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 12 Sep 2024 18:32:27 -0700 Subject: [PATCH 202/398] Add second test --- .../tests/testGaussianMixtureFactor.cpp | 23 ++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 81a68826a..049986fdb 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -555,7 +555,7 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { using namespace test_two_state_estimation; double mu0 = 1.0, mu1 = 3.0; - double sigma0 = 6.0, sigma1 = 4.0; + double sigma0 = 0.5, sigma1 = 2.0; auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma0, sigma1); // Start with no measurement on x1, only on x0 @@ -605,7 +605,7 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { // Now we add a measurement z1 on x1 HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); - double z1 = 2.2; + double z1 = 4.0; // favors m==1 given.insert(Z(1), Vector1(z1)); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); @@ -636,7 +636,24 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { // Since we have a measurement on z2, we get a definite result // Values taken from an importance sampling run with 50k samples: // approximateDiscreteMarginal(hbn, proposalNet, given); - DiscreteConditional expected(m1, "0.446345/0.553655"); + DiscreteConditional expected(m1, "0.481793/0.518207"); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.01)); + } + + { + // Add a different measurement z1 on that favors m==0 + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); + + double z1 = 1.1; + given.insert_or_assign(Z(1), Vector1(z1)); + + HybridGaussianFactorGraph gfg = 
hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Since we have a measurement on z2, we get a definite result + // Values taken from an importance sampling run with 50k samples: + // approximateDiscreteMarginal(hbn, proposalNet, given); + DiscreteConditional expected(m1, "0.554485/0.445515"); EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.01)); } } From 6a92db62c38c8725b238601b17b00b21b2ca1a92 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 13 Sep 2024 00:24:18 -0400 Subject: [PATCH 203/398] rename GaussianMixtureFactor to HybridGaussianFactor --- doc/Hybrid.lyx | 4 +- gtsam/hybrid/GaussianMixture.cpp | 10 ++--- gtsam/hybrid/GaussianMixture.h | 4 +- gtsam/hybrid/GaussianMixtureFactor.cpp | 22 +++++----- gtsam/hybrid/GaussianMixtureFactor.h | 22 +++++----- gtsam/hybrid/HybridFactor.h | 2 +- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 22 +++++----- gtsam/hybrid/HybridGaussianFactorGraph.h | 2 +- gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 6 +-- gtsam/hybrid/MixtureFactor.h | 8 ++-- gtsam/hybrid/hybrid.i | 14 +++---- gtsam/hybrid/tests/Switching.h | 4 +- gtsam/hybrid/tests/testGaussianMixture.cpp | 2 +- .../tests/testGaussianMixtureFactor.cpp | 40 +++++++++---------- gtsam/hybrid/tests/testHybridEstimation.cpp | 6 +-- gtsam/hybrid/tests/testHybridFactorGraph.cpp | 2 +- .../tests/testHybridGaussianFactorGraph.cpp | 22 +++++----- .../tests/testHybridNonlinearFactorGraph.cpp | 4 +- .../hybrid/tests/testSerializationHybrid.cpp | 22 +++++----- python/gtsam/tests/test_HybridFactorGraph.py | 6 +-- 20 files changed, 112 insertions(+), 112 deletions(-) diff --git a/doc/Hybrid.lyx b/doc/Hybrid.lyx index 44df81e39..b5901d495 100644 --- a/doc/Hybrid.lyx +++ b/doc/Hybrid.lyx @@ -548,13 +548,13 @@ with \end_layout \begin_layout Subsubsection* -GaussianMixtureFactor +HybridGaussianFactor \end_layout \begin_layout Standard Analogously, a \emph on -GaussianMixtureFactor +HybridGaussianFactor \emph default typically 
results from a GaussianMixture by having known values \begin_inset Formula $\bar{x}$ diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index ac1956d5f..feb49b6a2 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -21,7 +21,7 @@ #include #include #include -#include +#include #include #include #include @@ -72,7 +72,7 @@ GaussianMixture::GaussianMixture( /* *******************************************************************************/ // TODO(dellaert): This is copy/paste: GaussianMixture should be derived from -// GaussianMixtureFactor, no? +// HybridGaussianFactor, no? GaussianFactorGraphTree GaussianMixture::add( const GaussianFactorGraphTree &sum) const { using Y = GaussianFactorGraph; @@ -203,7 +203,7 @@ bool GaussianMixture::allFrontalsGiven(const VectorValues &given) const { } /* ************************************************************************* */ -std::shared_ptr GaussianMixture::likelihood( +std::shared_ptr GaussianMixture::likelihood( const VectorValues &given) const { if (!allFrontalsGiven(given)) { throw std::runtime_error( @@ -212,7 +212,7 @@ std::shared_ptr GaussianMixture::likelihood( const DiscreteKeys discreteParentKeys = discreteKeys(); const KeyVector continuousParentKeys = continuousParents(); - const GaussianMixtureFactor::Factors likelihoods( + const HybridGaussianFactor::Factors likelihoods( conditionals_, [&](const GaussianConditional::shared_ptr &conditional) { const auto likelihood_m = conditional->likelihood(given); const double Cgm_Kgcm = @@ -231,7 +231,7 @@ std::shared_ptr GaussianMixture::likelihood( return std::make_shared(gfg); } }); - return std::make_shared( + return std::make_shared( continuousParentKeys, discreteParentKeys, likelihoods); } diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index 714c00272..10feaf55b 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -23,7 +23,7 @@ #include #include 
#include -#include +#include #include #include #include @@ -165,7 +165,7 @@ class GTSAM_EXPORT GaussianMixture * Create a likelihood factor for a Gaussian mixture, return a Mixture factor * on the parents. */ - std::shared_ptr likelihood( + std::shared_ptr likelihood( const VectorValues &given) const; /// Getter for the underlying Conditionals DecisionTree diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 94bc09407..4445d875f 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file GaussianMixtureFactor.cpp + * @file HybridGaussianFactor.cpp * @brief A set of Gaussian factors indexed by a set of discrete keys. * @author Fan Jiang * @author Varun Agrawal @@ -21,7 +21,7 @@ #include #include #include -#include +#include #include #include #include @@ -29,13 +29,13 @@ namespace gtsam { /* *******************************************************************************/ -GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, +HybridGaussianFactor::HybridGaussianFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, const Factors &factors) : Base(continuousKeys, discreteKeys), factors_(factors) {} /* *******************************************************************************/ -bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { +bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { const This *e = dynamic_cast(&lf); if (e == nullptr) return false; @@ -52,10 +52,10 @@ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { } /* *******************************************************************************/ -void GaussianMixtureFactor::print(const std::string &s, +void HybridGaussianFactor::print(const std::string &s, const KeyFormatter &formatter) const { 
std::cout << (s.empty() ? "" : s + "\n"); - std::cout << "GaussianMixtureFactor" << std::endl; + std::cout << "HybridGaussianFactor" << std::endl; HybridFactor::print("", formatter); std::cout << "{\n"; if (factors_.empty()) { @@ -78,13 +78,13 @@ void GaussianMixtureFactor::print(const std::string &s, } /* *******************************************************************************/ -GaussianMixtureFactor::sharedFactor GaussianMixtureFactor::operator()( +HybridGaussianFactor::sharedFactor HybridGaussianFactor::operator()( const DiscreteValues &assignment) const { return factors_(assignment); } /* *******************************************************************************/ -GaussianFactorGraphTree GaussianMixtureFactor::add( +GaussianFactorGraphTree HybridGaussianFactor::add( const GaussianFactorGraphTree &sum) const { using Y = GaussianFactorGraph; auto add = [](const Y &graph1, const Y &graph2) { @@ -97,14 +97,14 @@ GaussianFactorGraphTree GaussianMixtureFactor::add( } /* *******************************************************************************/ -GaussianFactorGraphTree GaussianMixtureFactor::asGaussianFactorGraphTree() +GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree() const { auto wrap = [](const sharedFactor &gf) { return GaussianFactorGraph{gf}; }; return {factors_, wrap}; } /* *******************************************************************************/ -AlgebraicDecisionTree GaussianMixtureFactor::errorTree( +AlgebraicDecisionTree HybridGaussianFactor::errorTree( const VectorValues &continuousValues) const { // functor to convert from sharedFactor to double error value. 
auto errorFunc = [&continuousValues](const sharedFactor &gf) { @@ -115,7 +115,7 @@ AlgebraicDecisionTree GaussianMixtureFactor::errorTree( } /* *******************************************************************************/ -double GaussianMixtureFactor::error(const HybridValues &values) const { +double HybridGaussianFactor::error(const HybridValues &values) const { const sharedFactor gf = factors_(values.discrete()); return gf->error(values.continuous()); } diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 2459af259..467412b1d 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file GaussianMixtureFactor.h + * @file HybridGaussianFactor.h * @brief A set of GaussianFactors, indexed by a set of discrete keys. * @author Fan Jiang * @author Varun Agrawal @@ -44,10 +44,10 @@ class VectorValues; * * @ingroup hybrid */ -class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { +class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { public: using Base = HybridFactor; - using This = GaussianMixtureFactor; + using This = HybridGaussianFactor; using shared_ptr = std::shared_ptr; using sharedFactor = std::shared_ptr; @@ -72,7 +72,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { /// @{ /// Default constructor, mainly for serialization. - GaussianMixtureFactor() = default; + HybridGaussianFactor() = default; /** * @brief Construct a new Gaussian mixture factor. @@ -83,22 +83,22 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param factors The decision tree of Gaussian factors stored * as the mixture density. 
*/ - GaussianMixtureFactor(const KeyVector &continuousKeys, + HybridGaussianFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, const Factors &factors); /** - * @brief Construct a new GaussianMixtureFactor object using a vector of + * @brief Construct a new HybridGaussianFactor object using a vector of * GaussianFactor shared pointers. * * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of gaussian factor shared pointers. */ - GaussianMixtureFactor(const KeyVector &continuousKeys, + HybridGaussianFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, const std::vector &factors) - : GaussianMixtureFactor(continuousKeys, discreteKeys, + : HybridGaussianFactor(continuousKeys, discreteKeys, Factors(discreteKeys, factors)) {} /// @} @@ -128,7 +128,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const; /** - * @brief Compute error of the GaussianMixtureFactor as a tree. + * @brief Compute error of the HybridGaussianFactor as a tree. * * @param continuousValues The continuous VectorValues. * @return AlgebraicDecisionTree A decision tree with the same keys @@ -148,7 +148,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { /// Add MixtureFactor to a Sum, syntactic sugar. 
friend GaussianFactorGraphTree &operator+=( - GaussianFactorGraphTree &sum, const GaussianMixtureFactor &factor) { + GaussianFactorGraphTree &sum, const HybridGaussianFactor &factor) { sum = factor.add(sum); return sum; } @@ -168,7 +168,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { // traits template <> -struct traits : public Testable { +struct traits : public Testable { }; } // namespace gtsam diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index a9c0e53d2..4a68f851b 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -46,7 +46,7 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1, * * Examples: * - MixtureFactor - * - GaussianMixtureFactor + * - HybridGaussianFactor * - GaussianMixture * * @ingroup hybrid diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index efcd322aa..24669863e 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -24,7 +24,7 @@ #include #include #include -#include +#include #include #include #include @@ -92,7 +92,7 @@ void HybridGaussianFactorGraph::printErrors( // Clear the stringstream ss.str(std::string()); - if (auto gmf = std::dynamic_pointer_cast(factor)) { + if (auto gmf = std::dynamic_pointer_cast(factor)) { if (factor == nullptr) { std::cout << "nullptr" << "\n"; @@ -178,7 +178,7 @@ GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const { // TODO(dellaert): just use a virtual method defined in HybridFactor. 
if (auto gf = dynamic_pointer_cast(f)) { result = addGaussian(result, gf); - } else if (auto gmf = dynamic_pointer_cast(f)) { + } else if (auto gmf = dynamic_pointer_cast(f)) { result = gmf->add(result); } else if (auto gm = dynamic_pointer_cast(f)) { result = gm->add(result); @@ -258,8 +258,8 @@ discreteElimination(const HybridGaussianFactorGraph &factors, for (auto &f : factors) { if (auto df = dynamic_pointer_cast(f)) { dfg.push_back(df); - } else if (auto gmf = dynamic_pointer_cast(f)) { - // Case where we have a GaussianMixtureFactor with no continuous keys. + } else if (auto gmf = dynamic_pointer_cast(f)) { + // Case where we have a HybridGaussianFactor with no continuous keys. // In this case, compute discrete probabilities. auto logProbability = [&](const GaussianFactor::shared_ptr &factor) -> double { @@ -309,7 +309,7 @@ GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) { /* ************************************************************************ */ using Result = std::pair, - GaussianMixtureFactor::sharedFactor>; + HybridGaussianFactor::sharedFactor>; /** * Compute the probability p(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m) @@ -341,7 +341,7 @@ static std::shared_ptr createDiscreteFactor( return std::make_shared(discreteSeparator, probabilities); } -// Create GaussianMixtureFactor on the separator, taking care to correct +// Create HybridGaussianFactor on the separator, taking care to correct // for conditional constants. 
static std::shared_ptr createGaussianMixtureFactor( const DecisionTree &eliminationResults, @@ -362,7 +362,7 @@ static std::shared_ptr createGaussianMixtureFactor( DecisionTree newFactors(eliminationResults, correct); - return std::make_shared(continuousSeparator, + return std::make_shared(continuousSeparator, discreteSeparator, newFactors); } @@ -400,7 +400,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, DecisionTree eliminationResults(factorGraphTree, eliminate); // If there are no more continuous parents we create a DiscreteFactor with the - // error for each discrete choice. Otherwise, create a GaussianMixtureFactor + // error for each discrete choice. Otherwise, create a HybridGaussianFactor // on the separator, taking care to correct for conditional constants. auto newFactor = continuousSeparator.empty() @@ -549,7 +549,7 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::errorTree( f = hc->inner(); } - if (auto gaussianMixture = dynamic_pointer_cast(f)) { + if (auto gaussianMixture = dynamic_pointer_cast(f)) { // Compute factor error and add it. 
error_tree = error_tree + gaussianMixture->errorTree(continuousValues); } else if (auto gaussian = dynamic_pointer_cast(f)) { @@ -597,7 +597,7 @@ GaussianFactorGraph HybridGaussianFactorGraph::operator()( gfg.push_back(gf); } else if (auto gc = std::dynamic_pointer_cast(f)) { gfg.push_back(gf); - } else if (auto gmf = std::dynamic_pointer_cast(f)) { + } else if (auto gmf = std::dynamic_pointer_cast(f)) { gfg.push_back((*gmf)(assignment)); } else if (auto gm = dynamic_pointer_cast(f)) { gfg.push_back((*gm)(assignment)); diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 8a0a6f0a3..e6ce4467c 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -18,7 +18,7 @@ #pragma once -#include +#include #include #include #include diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index cdd448412..923058d05 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -70,7 +70,7 @@ void HybridNonlinearFactorGraph::printErrors( std::cout << std::endl; } } else if (auto gmf = - std::dynamic_pointer_cast(factor)) { + std::dynamic_pointer_cast(factor)) { if (factor == nullptr) { std::cout << "nullptr" << "\n"; @@ -152,7 +152,7 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize( } // Check if it is a nonlinear mixture factor if (auto mf = dynamic_pointer_cast(f)) { - const GaussianMixtureFactor::shared_ptr& gmf = + const HybridGaussianFactor::shared_ptr& gmf = mf->linearize(continuousValues); linearFG->push_back(gmf); } else if (auto nlf = dynamic_pointer_cast(f)) { @@ -161,7 +161,7 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize( } else if (dynamic_pointer_cast(f)) { // If discrete-only: doesn't need linearization. 
linearFG->push_back(f); - } else if (auto gmf = dynamic_pointer_cast(f)) { + } else if (auto gmf = dynamic_pointer_cast(f)) { linearFG->push_back(gmf); } else if (auto gm = dynamic_pointer_cast(f)) { linearFG->push_back(gm); diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/MixtureFactor.h index 09a641b48..d2e0e7822 100644 --- a/gtsam/hybrid/MixtureFactor.h +++ b/gtsam/hybrid/MixtureFactor.h @@ -20,7 +20,7 @@ #pragma once #include -#include +#include #include #include #include @@ -233,8 +233,8 @@ class MixtureFactor : public HybridFactor { return factor->linearize(continuousValues); } - /// Linearize all the continuous factors to get a GaussianMixtureFactor. - std::shared_ptr linearize( + /// Linearize all the continuous factors to get a HybridGaussianFactor. + std::shared_ptr linearize( const Values& continuousValues) const { // functional to linearize each factor in the decision tree auto linearizeDT = [continuousValues](const sharedFactor& factor) { @@ -244,7 +244,7 @@ class MixtureFactor : public HybridFactor { DecisionTree linearized_factors( factors_, linearizeDT); - return std::make_shared( + return std::make_shared( continuousKeys_, discreteKeys_, linearized_factors); } diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 85c34e575..0c72d5046 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -72,14 +72,14 @@ virtual class HybridConditional { double error(const gtsam::HybridValues& values) const; }; -#include -class GaussianMixtureFactor : gtsam::HybridFactor { - GaussianMixtureFactor( +#include +class HybridGaussianFactor : gtsam::HybridFactor { + HybridGaussianFactor( const gtsam::KeyVector& continuousKeys, const gtsam::DiscreteKeys& discreteKeys, const std::vector& factorsList); - void print(string s = "GaussianMixtureFactor\n", + void print(string s = "HybridGaussianFactor\n", const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const; }; @@ -92,7 +92,7 @@ class GaussianMixture : gtsam::HybridFactor { 
const std::vector& conditionalsList); - gtsam::GaussianMixtureFactor* likelihood( + gtsam::HybridGaussianFactor* likelihood( const gtsam::VectorValues& frontals) const; double logProbability(const gtsam::HybridValues& values) const; double evaluate(const gtsam::HybridValues& values) const; @@ -177,7 +177,7 @@ class HybridGaussianFactorGraph { void push_back(const gtsam::HybridGaussianFactorGraph& graph); void push_back(const gtsam::HybridBayesNet& bayesNet); void push_back(const gtsam::HybridBayesTree& bayesTree); - void push_back(const gtsam::GaussianMixtureFactor* gmm); + void push_back(const gtsam::HybridGaussianFactor* gmm); void push_back(gtsam::DecisionTreeFactor* factor); void push_back(gtsam::TableFactor* factor); void push_back(gtsam::JacobianFactor* factor); @@ -253,7 +253,7 @@ class MixtureFactor : gtsam::HybridFactor { double nonlinearFactorLogNormalizingConstant(const gtsam::NonlinearFactor* factor, const gtsam::Values& values) const; - GaussianMixtureFactor* linearize( + HybridGaussianFactor* linearize( const gtsam::Values& continuousValues) const; void print(string s = "MixtureFactor\n", diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 4b2d3f11b..126b15c6e 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -19,7 +19,7 @@ #include #include #include -#include +#include #include #include #include @@ -57,7 +57,7 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( // keyFunc(1) to keyFunc(n+1) for (size_t t = 1; t < n; t++) { - hfg.add(GaussianMixtureFactor( + hfg.add(HybridGaussianFactor( {keyFunc(t), keyFunc(t + 1)}, {{dKeyFunc(t), 2}}, {std::make_shared(keyFunc(t), I_3x3, keyFunc(t + 1), I_3x3, Z_3x1), diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index 4da61912e..a379dd036 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -20,7 +20,7 @@ #include #include 
-#include +#include #include #include #include diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index fcd9dd08f..cd20e7215 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -11,7 +11,7 @@ /** * @file testGaussianMixtureFactor.cpp - * @brief Unit tests for GaussianMixtureFactor + * @brief Unit tests for HybridGaussianFactor * @author Varun Agrawal * @author Fan Jiang * @author Frank Dellaert @@ -21,7 +21,7 @@ #include #include #include -#include +#include #include #include #include @@ -43,17 +43,17 @@ using symbol_shorthand::Z; /* ************************************************************************* */ // Check iterators of empty mixture. -TEST(GaussianMixtureFactor, Constructor) { - GaussianMixtureFactor factor; - GaussianMixtureFactor::const_iterator const_it = factor.begin(); +TEST(HybridGaussianFactor, Constructor) { + HybridGaussianFactor factor; + HybridGaussianFactor::const_iterator const_it = factor.begin(); CHECK(const_it == factor.end()); - GaussianMixtureFactor::iterator it = factor.begin(); + HybridGaussianFactor::iterator it = factor.begin(); CHECK(it == factor.end()); } /* ************************************************************************* */ // "Add" two mixture factors together. -TEST(GaussianMixtureFactor, Sum) { +TEST(HybridGaussianFactor, Sum) { DiscreteKey m1(1, 2), m2(2, 3); auto A1 = Matrix::Zero(2, 1); @@ -74,8 +74,8 @@ TEST(GaussianMixtureFactor, Sum) { // TODO(Frank): why specify keys at all? And: keys in factor should be *all* // keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey? // Design review! 
- GaussianMixtureFactor mixtureFactorA({X(1), X(2)}, {m1}, factorsA); - GaussianMixtureFactor mixtureFactorB({X(1), X(3)}, {m2}, factorsB); + HybridGaussianFactor mixtureFactorA({X(1), X(2)}, {m1}, factorsA); + HybridGaussianFactor mixtureFactorB({X(1), X(3)}, {m2}, factorsB); // Check that number of keys is 3 EXPECT_LONGS_EQUAL(3, mixtureFactorA.keys().size()); @@ -99,7 +99,7 @@ TEST(GaussianMixtureFactor, Sum) { } /* ************************************************************************* */ -TEST(GaussianMixtureFactor, Printing) { +TEST(HybridGaussianFactor, Printing) { DiscreteKey m1(1, 2); auto A1 = Matrix::Zero(2, 1); auto A2 = Matrix::Zero(2, 2); @@ -108,10 +108,10 @@ TEST(GaussianMixtureFactor, Printing) { auto f11 = std::make_shared(X(1), A1, X(2), A2, b); std::vector factors{f10, f11}; - GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors); std::string expected = - R"(GaussianMixtureFactor + R"(HybridGaussianFactor Hybrid [x1 x2; 1]{ Choice(1) 0 Leaf : @@ -144,7 +144,7 @@ Hybrid [x1 x2; 1]{ } /* ************************************************************************* */ -TEST(GaussianMixtureFactor, GaussianMixture) { +TEST(HybridGaussianFactor, GaussianMixture) { KeyVector keys; keys.push_back(X(0)); keys.push_back(X(1)); @@ -161,8 +161,8 @@ TEST(GaussianMixtureFactor, GaussianMixture) { } /* ************************************************************************* */ -// Test the error of the GaussianMixtureFactor -TEST(GaussianMixtureFactor, Error) { +// Test the error of the HybridGaussianFactor +TEST(HybridGaussianFactor, Error) { DiscreteKey m1(1, 2); auto A01 = Matrix2::Identity(); @@ -177,7 +177,7 @@ TEST(GaussianMixtureFactor, Error) { auto f1 = std::make_shared(X(1), A11, X(2), A12, b); std::vector factors{f0, f1}; - GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors); VectorValues 
continuousValues; continuousValues.insert(X(1), Vector2(0, 0)); @@ -250,7 +250,7 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, * The resulting factor graph should eliminate to a Bayes net * which represents a sigmoid function. */ -TEST(GaussianMixtureFactor, GaussianMixtureModel) { +TEST(HybridGaussianFactor, GaussianMixtureModel) { using namespace test_gmm; double mu0 = 1.0, mu1 = 3.0; @@ -322,7 +322,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { * which represents a Gaussian-like function * where m1>m0 close to 3.1333. */ -TEST(GaussianMixtureFactor, GaussianMixtureModel2) { +TEST(HybridGaussianFactor, GaussianMixtureModel2) { using namespace test_gmm; double mu0 = 1.0, mu1 = 3.0; @@ -446,7 +446,7 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, * the probability of m1 should be 0.5/0.5. * Getting a measurement on z1 gives use more information. */ -TEST(GaussianMixtureFactor, TwoStateModel) { +TEST(HybridGaussianFactor, TwoStateModel) { using namespace test_two_state_estimation; double mu0 = 1.0, mu1 = 3.0; @@ -500,7 +500,7 @@ TEST(GaussianMixtureFactor, TwoStateModel) { * the P(m1) should be 0.5/0.5. * Getting a measurement on z1 gives use more information. */ -TEST(GaussianMixtureFactor, TwoStateModel2) { +TEST(HybridGaussianFactor, TwoStateModel2) { using namespace test_two_state_estimation; double mu0 = 1.0, mu1 = 3.0; diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index bdc298762..2ec9c514a 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -531,10 +531,10 @@ TEST(HybridEstimation, CorrectnessViaSampling) { * Helper function to add the constant term corresponding to * the difference in noise models. 
*/ -std::shared_ptr mixedVarianceFactor( +std::shared_ptr mixedVarianceFactor( const MixtureFactor& mf, const Values& initial, const Key& mode, double noise_tight, double noise_loose, size_t d, size_t tight_index) { - GaussianMixtureFactor::shared_ptr gmf = mf.linearize(initial); + HybridGaussianFactor::shared_ptr gmf = mf.linearize(initial); constexpr double log2pi = 1.8378770664093454835606594728112; // logConstant will be of the tighter model @@ -560,7 +560,7 @@ std::shared_ptr mixedVarianceFactor( } }; auto updated_components = gmf->factors().apply(func); - auto updated_gmf = std::make_shared( + auto updated_gmf = std::make_shared( gmf->continuousKeys(), gmf->discreteKeys(), updated_components); return updated_gmf; diff --git a/gtsam/hybrid/tests/testHybridFactorGraph.cpp b/gtsam/hybrid/tests/testHybridFactorGraph.cpp index 33c0761eb..bc0ef91d7 100644 --- a/gtsam/hybrid/tests/testHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridFactorGraph.cpp @@ -55,7 +55,7 @@ TEST(HybridFactorGraph, Keys) { DecisionTree dt( M(1), std::make_shared(X(1), I_3x3, Z_3x1), std::make_shared(X(1), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt)); + hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt)); KeySet expected_continuous{X(0), X(1)}; EXPECT( diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index a7a315c87..96d0c478e 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -22,7 +22,7 @@ #include #include #include -#include +#include #include #include #include @@ -129,7 +129,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { DecisionTree dt( M(1), std::make_shared(X(1), I_3x3, Z_3x1), std::make_shared(X(1), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt)); + hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt)); auto result = hfg.eliminateSequential(); @@ -155,7 
+155,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) { std::vector factors = { std::make_shared(X(1), I_3x3, Z_3x1), std::make_shared(X(1), I_3x3, Vector3::Ones())}; - hfg.add(GaussianMixtureFactor({X(1)}, {m1}, factors)); + hfg.add(HybridGaussianFactor({X(1)}, {m1}, factors)); // Discrete probability table for c1 hfg.add(DecisionTreeFactor(m1, {2, 8})); @@ -177,7 +177,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - hfg.add(GaussianMixtureFactor( + hfg.add(HybridGaussianFactor( {X(1)}, {{M(1), 2}}, {std::make_shared(X(1), I_3x3, Z_3x1), std::make_shared(X(1), I_3x3, Vector3::Ones())})); @@ -212,7 +212,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) { std::make_shared(X(1), I_3x3, Vector3::Ones())); // Hybrid factor P(x1|c1) - hfg.add(GaussianMixtureFactor({X(1)}, {m}, dt)); + hfg.add(HybridGaussianFactor({X(1)}, {m}, dt)); // Prior factor on c1 hfg.add(DecisionTreeFactor(m, {2, 8})); @@ -237,7 +237,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1)); { - hfg.add(GaussianMixtureFactor( + hfg.add(HybridGaussianFactor( {X(0)}, {{M(0), 2}}, {std::make_shared(X(0), I_3x3, Z_3x1), std::make_shared(X(0), I_3x3, Vector3::Ones())})); @@ -246,7 +246,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { M(1), std::make_shared(X(2), I_3x3, Z_3x1), std::make_shared(X(2), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(2)}, {{M(1), 2}}, dt1)); + hfg.add(HybridGaussianFactor({X(2)}, {{M(1), 2}}, dt1)); } hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); @@ -259,13 +259,13 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { M(3), std::make_shared(X(3), I_3x3, Z_3x1), std::make_shared(X(3), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(3)}, {{M(3), 2}}, 
dt)); + hfg.add(HybridGaussianFactor({X(3)}, {{M(3), 2}}, dt)); DecisionTree dt1( M(2), std::make_shared(X(5), I_3x3, Z_3x1), std::make_shared(X(5), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(5)}, {{M(2), 2}}, dt1)); + hfg.add(HybridGaussianFactor({X(5)}, {{M(2), 2}}, dt1)); } auto ordering_full = @@ -555,7 +555,7 @@ TEST(HybridGaussianFactorGraph, optimize) { C(1), std::make_shared(X(1), I_3x3, Z_3x1), std::make_shared(X(1), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt)); + hfg.add(HybridGaussianFactor({X(1)}, {c1}, dt)); auto result = hfg.eliminateSequential(); @@ -717,7 +717,7 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) { // Create expected decision tree with two factor graphs: // Get mixture factor: - auto mixture = fg.at(0); + auto mixture = fg.at(0); CHECK(mixture); // Get prior factor: diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 2d851b0ff..67b820c33 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -510,7 +510,7 @@ factor 0: b = [ -10 ] No noise model factor 1: -GaussianMixtureFactor +HybridGaussianFactor Hybrid [x0 x1; m0]{ Choice(m0) 0 Leaf : @@ -535,7 +535,7 @@ Hybrid [x0 x1; m0]{ } factor 2: -GaussianMixtureFactor +HybridGaussianFactor Hybrid [x1 x2; m1]{ Choice(m1) 0 Leaf : diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 961618626..31fe61d9b 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -19,7 +19,7 @@ #include #include #include -#include +#include #include #include #include @@ -51,12 +51,12 @@ BOOST_CLASS_EXPORT_GUID(ADT, "gtsam_AlgebraicDecisionTree"); BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_AlgebraicDecisionTree_Leaf"); BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_AlgebraicDecisionTree_Choice") 
-BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor, "gtsam_GaussianMixtureFactor"); -BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors, +BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor, "gtsam_GaussianMixtureFactor"); +BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors, "gtsam_GaussianMixtureFactor_Factors"); -BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors::Leaf, +BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Leaf, "gtsam_GaussianMixtureFactor_Factors_Leaf"); -BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors::Choice, +BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Choice, "gtsam_GaussianMixtureFactor_Factors_Choice"); BOOST_CLASS_EXPORT_GUID(GaussianMixture, "gtsam_GaussianMixture"); @@ -72,8 +72,8 @@ BOOST_CLASS_EXPORT_GUID(noiseModel::Isotropic, "gtsam_noiseModel_Isotropic"); BOOST_CLASS_EXPORT_GUID(HybridBayesNet, "gtsam_HybridBayesNet"); /* ****************************************************************************/ -// Test GaussianMixtureFactor serialization. -TEST(HybridSerialization, GaussianMixtureFactor) { +// Test HybridGaussianFactor serialization. 
+TEST(HybridSerialization, HybridGaussianFactor) { KeyVector continuousKeys{X(0)}; DiscreteKeys discreteKeys{{M(0), 2}}; @@ -84,11 +84,11 @@ TEST(HybridSerialization, GaussianMixtureFactor) { auto f1 = std::make_shared(X(0), A, b1); std::vector factors{f0, f1}; - const GaussianMixtureFactor factor(continuousKeys, discreteKeys, factors); + const HybridGaussianFactor factor(continuousKeys, discreteKeys, factors); - EXPECT(equalsObj(factor)); - EXPECT(equalsXML(factor)); - EXPECT(equalsBinary(factor)); + EXPECT(equalsObj(factor)); + EXPECT(equalsXML(factor)); + EXPECT(equalsBinary(factor)); } /* ****************************************************************************/ diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index 499afe09f..02a9b7b7d 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -18,7 +18,7 @@ from gtsam.utils.test_case import GtsamTestCase import gtsam from gtsam import (DiscreteConditional, DiscreteKeys, GaussianConditional, - GaussianMixture, GaussianMixtureFactor, HybridBayesNet, + GaussianMixture, HybridBayesNet, HybridGaussianFactor, HybridGaussianFactorGraph, HybridValues, JacobianFactor, Ordering, noiseModel) @@ -36,7 +36,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model) jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model) - gmf = GaussianMixtureFactor([X(0)], dk, [jf1, jf2]) + gmf = HybridGaussianFactor([X(0)], dk, [jf1, jf2]) hfg = HybridGaussianFactorGraph() hfg.push_back(jf1) @@ -63,7 +63,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model) jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model) - gmf = GaussianMixtureFactor([X(0)], dk, [jf1, jf2]) + gmf = HybridGaussianFactor([X(0)], dk, [jf1, jf2]) hfg = HybridGaussianFactorGraph() hfg.push_back(jf1) From 
07b4c236eb49b53c65c3b709c0433f7d2be1b533 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 12 Sep 2024 23:20:05 -0700 Subject: [PATCH 204/398] Finishing touches --- .../tests/testGaussianMixtureFactor.cpp | 110 ++++++++---------- 1 file changed, 46 insertions(+), 64 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 049986fdb..c0a012e79 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -29,6 +29,7 @@ #include #include #include +#include #include #include @@ -433,20 +434,20 @@ HybridBayesNet CreateBayesNet( return hbn; } -/// Create importance sampling network p(x1| x0, m1) p(x0) P(m1), -/// using Q(x0) = N(z0, sigma_Q) to sample from p(x0) +/// Create importance sampling network q(x0,x1,m) = p(x1|x0,m1) q(x0) P(m1), +/// using q(x0) = N(z0, sigma_Q) to sample x0. HybridBayesNet CreateProposalNet( - const GaussianMixture::shared_ptr& hybridMotionModel, double z0, + const GaussianMixture::shared_ptr& hybridMotionModel, const Vector1& z0, double sigma_Q) { HybridBayesNet hbn; // Add hybrid motion model hbn.push_back(hybridMotionModel); - // Add proposal Q(x0) for x0 + // Add proposal q(x0) for x0 auto measurement_model = noiseModel::Isotropic::Sigma(1, sigma_Q); hbn.emplace_shared( - GaussianConditional::FromMeanAndStddev(X(0), Vector1(z0), sigma_Q)); + GaussianConditional::FromMeanAndStddev(X(0), z0, sigma_Q)); // Discrete uniform prior. hbn.emplace_shared(m1, "0.5/0.5"); @@ -466,7 +467,7 @@ void approximateDiscreteMarginal(const HybridBayesNet& hbn, HybridValues sample = proposalNet.sample(&rng); sample.insert(given); double weight = hbn.evaluate(sample) / proposalNet.evaluate(sample); - (sample.atDiscrete(m1.first) == 0) ? w0 += weight : w1 += weight; + (sample.atDiscrete(M(1)) == 0) ? 
w0 += weight : w1 += weight; } double sumWeights = w0 + w1; double pm1 = w1 / sumWeights; @@ -478,15 +479,14 @@ void approximateDiscreteMarginal(const HybridBayesNet& hbn, /* ************************************************************************* */ /** - * Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). + * Test a model p(z0|x0)p(z1|x1)p(x1|x0,m1)P(m1). * - * P(f01|x1,x0,m1) has different means and same covariance. + * p(x1|x0,m1) has mode-dependent mean but same covariance. * - * Converting to a factor graph gives us - * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) + * Converting to a factor graph gives us ϕ(x0;z0)ϕ(x1;z1)ϕ(x1,x0,m1)P(m1) * - * If we only have a measurement on z0, then - * the probability of m1 should be 0.5/0.5. + * If we only have a measurement on x0, then + * the posterior probability of m1 should be 0.5/0.5. * Getting a measurement on z1 gives use more information. */ TEST(GaussianMixtureFactor, TwoStateModel) { @@ -497,10 +497,10 @@ TEST(GaussianMixtureFactor, TwoStateModel) { auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma, sigma); // Start with no measurement on x1, only on x0 - double z0 = 0.5; + const Vector1 z0(0.5); VectorValues given; - given.insert(Z(0), Vector1(z0)); + given.insert(Z(0), z0); // Create proposal network for importance sampling auto proposalNet = CreateProposalNet(hybridMotionModel, z0, 3.0); @@ -522,10 +522,10 @@ TEST(GaussianMixtureFactor, TwoStateModel) { // Now we add a measurement z1 on x1 HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); - // If we see z1=4.5 (>> 2.5 which is the halfway point), - // discrete mode should say m1=1 - const double z1 = 4.5; - given.insert(Z(1), Vector1(z1)); + // If we set z1=4.5 (>> 2.5 which is the halfway point), + // probability of discrete mode should be leaning to m1==1. 
+ const Vector1 z1(4.5); + given.insert(Z(1), z1); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); @@ -534,7 +534,7 @@ TEST(GaussianMixtureFactor, TwoStateModel) { // Values taken from an importance sampling run with 50k samples: // approximateDiscreteMarginal(hbn, proposalNet, given); DiscreteConditional expected(m1, "0.446629/0.553371"); - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.01)); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); } } @@ -559,9 +559,9 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma0, sigma1); // Start with no measurement on x1, only on x0 - double z0 = 0.5; + const Vector1 z0(0.5); VectorValues given; - given.insert(Z(0), Vector1(z0)); + given.insert(Z(0), z0); // Create proposal network for importance sampling // uncomment this and the approximateDiscreteMarginal calls to run @@ -571,19 +571,13 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { HybridBayesNet hbn = CreateBayesNet(hybridMotionModel); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - { - VectorValues vv{ - {X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}, {Z(0), Vector1(z0)}}; - HybridValues hv0(vv, DiscreteValues{{M(1), 0}}), - hv1(vv, DiscreteValues{{M(1), 1}}); - EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), - gfg.error(hv1) / hbn.error(hv1), 1e-9); - } - { - VectorValues vv{ - {X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}, {Z(0), Vector1(z0)}}; - HybridValues hv0(vv, DiscreteValues{{M(1), 0}}), - hv1(vv, DiscreteValues{{M(1), 1}}); + // Check that ratio of Bayes net and factor graph for different modes is + // equal for several values of {x0,x1}. 
+ for (VectorValues vv : + {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + vv.insert(given); // add measurements for HBN + HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), gfg.error(hv1) / hbn.error(hv1), 1e-9); } @@ -595,66 +589,54 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { // Since no measurement on x1, we a 50/50 probability auto p_m = bn->at(2)->asDiscrete(); - EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()(DiscreteValues{{m1.first, 0}}), - 1e-9); - EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()(DiscreteValues{{m1.first, 1}}), - 1e-9); + EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()(DiscreteValues{{M(1), 0}}), 1e-9); + EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()(DiscreteValues{{M(1), 1}}), 1e-9); } { // Now we add a measurement z1 on x1 HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); - double z1 = 4.0; // favors m==1 - given.insert(Z(1), Vector1(z1)); + const Vector1 z1(4.0); // favors m==1 + given.insert(Z(1), z1); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - { - VectorValues vv{{X(0), Vector1(0.0)}, - {X(1), Vector1(1.0)}, - {Z(0), Vector1(z0)}, - {Z(1), Vector1(z1)}}; - HybridValues hv0(vv, DiscreteValues{{M(1), 0}}), - hv1(vv, DiscreteValues{{M(1), 1}}); - EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), - gfg.error(hv1) / hbn.error(hv1), 1e-9); - } - { - VectorValues vv{{X(0), Vector1(0.5)}, - {X(1), Vector1(3.0)}, - {Z(0), Vector1(z0)}, - {Z(1), Vector1(z1)}}; - HybridValues hv0(vv, DiscreteValues{{M(1), 0}}), - hv1(vv, DiscreteValues{{M(1), 1}}); + // Check that ratio of Bayes net and factor graph for different modes is + // equal for several values of {x0,x1}. 
+ for (VectorValues vv : + {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + vv.insert(given); // add measurements for HBN + HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), gfg.error(hv1) / hbn.error(hv1), 1e-9); } HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - // Since we have a measurement on z2, we get a definite result + // Since we have a measurement z1 on x1, we get a definite result // Values taken from an importance sampling run with 50k samples: // approximateDiscreteMarginal(hbn, proposalNet, given); DiscreteConditional expected(m1, "0.481793/0.518207"); - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.01)); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.001)); } { - // Add a different measurement z1 on that favors m==0 + // Add a different measurement z1 on x1 that favors m==0 HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); - double z1 = 1.1; - given.insert_or_assign(Z(1), Vector1(z1)); + const Vector1 z1(1.1); + given.insert_or_assign(Z(1), z1); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - // Since we have a measurement on z2, we get a definite result + // Since we have a measurement z1 on x1, we get a definite result // Values taken from an importance sampling run with 50k samples: // approximateDiscreteMarginal(hbn, proposalNet, given); DiscreteConditional expected(m1, "0.554485/0.445515"); - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.01)); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.001)); } } From 70651e2cc501d75b764e038c8623b492535bcfda Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 12 Sep 2024 23:45:32 -0700 Subject: [PATCH 205/398] Inline q, use 100k --- .../tests/testGaussianMixtureFactor.cpp | 113 +++++++----------- 1 file changed, 43 
insertions(+), 70 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index c0a012e79..ab8453fba 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -235,7 +235,7 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, hbn.emplace_shared(KeyVector{z}, KeyVector{}, DiscreteKeys{m}, std::vector{c0, c1}); - auto mixing = make_shared(m, "0.5/0.5"); + auto mixing = make_shared(m, "50/50"); hbn.push_back(mixing); return hbn; @@ -281,7 +281,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { // At the halfway point between the means, we should get P(m|z)=0.5 HybridBayesNet expected; - expected.emplace_shared(m, "0.5/0.5"); + expected.emplace_shared(m, "50/50"); EXPECT(assert_equal(expected, *bn)); } @@ -429,50 +429,37 @@ HybridBayesNet CreateBayesNet( hbn.push_back(hybridMotionModel); // Discrete uniform prior. - hbn.emplace_shared(m1, "0.5/0.5"); - - return hbn; -} - -/// Create importance sampling network q(x0,x1,m) = p(x1|x0,m1) q(x0) P(m1), -/// using q(x0) = N(z0, sigma_Q) to sample x0. -HybridBayesNet CreateProposalNet( - const GaussianMixture::shared_ptr& hybridMotionModel, const Vector1& z0, - double sigma_Q) { - HybridBayesNet hbn; - - // Add hybrid motion model - hbn.push_back(hybridMotionModel); - - // Add proposal q(x0) for x0 - auto measurement_model = noiseModel::Isotropic::Sigma(1, sigma_Q); - hbn.emplace_shared( - GaussianConditional::FromMeanAndStddev(X(0), z0, sigma_Q)); - - // Discrete uniform prior. - hbn.emplace_shared(m1, "0.5/0.5"); + hbn.emplace_shared(m1, "50/50"); return hbn; } /// Approximate the discrete marginal P(m1) using importance sampling /// Not typically called as expensive, but values are used in the tests. 
-void approximateDiscreteMarginal(const HybridBayesNet& hbn, - const HybridBayesNet& proposalNet, - const VectorValues& given) { +void approximateDiscreteMarginal( + const HybridBayesNet& hbn, + const GaussianMixture::shared_ptr& hybridMotionModel, + const VectorValues& given, size_t N = 100000) { + /// Create importance sampling network q(x0,x1,m) = p(x1|x0,m1) q(x0) P(m1), + /// using q(x0) = N(z0, sigma_Q) to sample x0. + HybridBayesNet q; + q.push_back(hybridMotionModel); // Add hybrid motion model + q.emplace_shared(GaussianConditional::FromMeanAndStddev( + X(0), given.at(Z(0)), /* sigma_Q = */ 3.0)); // Add proposal q(x0) for x0 + q.emplace_shared(m1, "50/50"); // Discrete prior. + // Do importance sampling double w0 = 0.0, w1 = 0.0; - std::mt19937_64 rng(44); - for (int i = 0; i < 50000; i++) { - HybridValues sample = proposalNet.sample(&rng); + std::mt19937_64 rng(42); + for (int i = 0; i < N; i++) { + HybridValues sample = q.sample(&rng); sample.insert(given); - double weight = hbn.evaluate(sample) / proposalNet.evaluate(sample); + double weight = hbn.evaluate(sample) / q.evaluate(sample); (sample.atDiscrete(M(1)) == 0) ? 
w0 += weight : w1 += weight; } - double sumWeights = w0 + w1; - double pm1 = w1 / sumWeights; - std::cout << "p(m0) ~ " << 1.0 - pm1 << std::endl; - std::cout << "p(m1) ~ " << pm1 << std::endl; + double pm1 = w1 / (w0 + w1); + std::cout << "p(m0) = " << 100 * (1.0 - pm1) << std::endl; + std::cout << "p(m1) = " << 100 * pm1 << std::endl; } } // namespace test_two_state_estimation @@ -502,38 +489,32 @@ TEST(GaussianMixtureFactor, TwoStateModel) { VectorValues given; given.insert(Z(0), z0); - // Create proposal network for importance sampling - auto proposalNet = CreateProposalNet(hybridMotionModel, z0, 3.0); - EXPECT_LONGS_EQUAL(3, proposalNet.size()); - { HybridBayesNet hbn = CreateBayesNet(hybridMotionModel); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); // Since no measurement on x1, we hedge our bets - // Importance sampling run with 50k samples gives 0.49934/0.50066 - // approximateDiscreteMarginal(hbn, proposalNet, given); - DiscreteConditional expected(m1, "0.5/0.5"); + // Importance sampling run with 100k samples gives 50.051/49.949 + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + DiscreteConditional expected(m1, "50/50"); EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()))); } { - // Now we add a measurement z1 on x1 - HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); - // If we set z1=4.5 (>> 2.5 which is the halfway point), // probability of discrete mode should be leaning to m1==1. 
const Vector1 z1(4.5); given.insert(Z(1), z1); + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); // Since we have a measurement on x1, we get a definite result - // Values taken from an importance sampling run with 50k samples: - // approximateDiscreteMarginal(hbn, proposalNet, given); - DiscreteConditional expected(m1, "0.446629/0.553371"); + // Values taken from an importance sampling run with 100k samples: + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + DiscreteConditional expected(m1, "44.3854/55.6146"); EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); } } @@ -563,10 +544,6 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { VectorValues given; given.insert(Z(0), z0); - // Create proposal network for importance sampling - // uncomment this and the approximateDiscreteMarginal calls to run - // auto proposalNet = CreateProposalNet(hybridMotionModel, z0, 3.0); - { HybridBayesNet hbn = CreateBayesNet(hybridMotionModel); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); @@ -584,22 +561,21 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - // Importance sampling run with 50k samples gives 0.49934/0.50066 - // approximateDiscreteMarginal(hbn, proposalNet, given); + // Importance sampling run with 100k samples gives 50.095/49.905 + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); // Since no measurement on x1, we a 50/50 probability auto p_m = bn->at(2)->asDiscrete(); - EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()(DiscreteValues{{M(1), 0}}), 1e-9); - EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()(DiscreteValues{{M(1), 1}}), 1e-9); + EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 0}}), 1e-9); + EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 1}}), 1e-9); } { // Now we add a measurement z1 on x1 - HybridBayesNet hbn = 
CreateBayesNet(hybridMotionModel, true); - const Vector1 z1(4.0); // favors m==1 given.insert(Z(1), z1); + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); // Check that ratio of Bayes net and factor graph for different modes is @@ -615,28 +591,25 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - // Since we have a measurement z1 on x1, we get a definite result - // Values taken from an importance sampling run with 50k samples: - // approximateDiscreteMarginal(hbn, proposalNet, given); - DiscreteConditional expected(m1, "0.481793/0.518207"); - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.001)); + // Values taken from an importance sampling run with 100k samples: + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + DiscreteConditional expected(m1, "48.3158/51.6842"); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); } { // Add a different measurement z1 on x1 that favors m==0 - HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); - const Vector1 z1(1.1); given.insert_or_assign(Z(1), z1); + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - // Since we have a measurement z1 on x1, we get a definite result - // Values taken from an importance sampling run with 50k samples: - // approximateDiscreteMarginal(hbn, proposalNet, given); - DiscreteConditional expected(m1, "0.554485/0.445515"); - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.001)); + // Values taken from an importance sampling run with 100k samples: + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + DiscreteConditional expected(m1, "55.396/44.604"); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); } } From 8fbabf5c2417738a561c1e6193dac56e2cdd8fb9 
Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Fri, 13 Sep 2024 01:08:33 -0700 Subject: [PATCH 206/398] Extreme example --- .../tests/testGaussianMixtureFactor.cpp | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index ab8453fba..7947779c8 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -613,6 +613,33 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { } } +/* ************************************************************************* */ +/** + * Same model, P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1), but now with very informative + * measurements and vastly different motion model: either stand still or move + * far. This yields a very informative posterior. + */ +TEST(GaussianMixtureFactor, TwoStateModel3) { + using namespace test_two_state_estimation; + + double mu0 = 0.0, mu1 = 10.0; + double sigma0 = 0.2, sigma1 = 5.0; + auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma0, sigma1); + + // We only check the 2-measurement case + const Vector1 z0(0.0), z1(10.0); + VectorValues given{{Z(0), z0}, {Z(1), z1}}; + + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Values taken from an importance sampling run with 100k samples: + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + DiscreteConditional expected(m1, "8.91527/91.0847"); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); +} + /* ************************************************************************* */ int main() { TestResult tr; From bf00ca891d103b183aa44d624e1181b3a6b788ea Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Fri, 13 Sep 2024 01:18:05 -0700 Subject: [PATCH 207/398] Small improvements --- .../tests/testGaussianMixtureFactor.cpp 
| 32 +++++++++++++------ 1 file changed, 22 insertions(+), 10 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 7947779c8..0ccb6d7bc 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -392,6 +392,12 @@ namespace test_two_state_estimation { DiscreteKey m1(M(1), 2); +void addMeasurement(HybridBayesNet& hbn, Key z_key, Key x_key, double sigma) { + auto measurement_model = noiseModel::Isotropic::Sigma(1, sigma); + hbn.emplace_shared(z_key, Vector1(0.0), I_1x1, x_key, + -I_1x1, measurement_model); +} + /// Create hybrid motion model p(x1 | x0, m1) static GaussianMixture::shared_ptr CreateHybridMotionModel(double mu0, double mu1, @@ -414,15 +420,11 @@ HybridBayesNet CreateBayesNet( HybridBayesNet hbn; // Add measurement model p(z0 | x0) - const double measurement_sigma = 3.0; - auto measurement_model = noiseModel::Isotropic::Sigma(1, measurement_sigma); - hbn.emplace_shared(Z(0), Vector1(0.0), I_1x1, X(0), - -I_1x1, measurement_model); + addMeasurement(hbn, Z(0), X(0), 3.0); // Optionally add second measurement model p(z1 | x1) if (add_second_measurement) { - hbn.emplace_shared(Z(1), Vector1(0.0), I_1x1, X(1), - -I_1x1, measurement_model); + addMeasurement(hbn, Z(1), X(1), 3.0); } // Add hybrid motion model @@ -434,18 +436,27 @@ HybridBayesNet CreateBayesNet( return hbn; } +/** + * @brief Approximates the discrete marginal P(m1) using importance sampling. + * @note Not typically called as expensive, but values are used in the tests. + * + * @param hbn The hybrid Bayesian network. + * @param hybridMotionModel The hybrid motion model. + * @param given Observed values for variables. + * @param N Number of samples for importance sampling. + * @return std::pair Probabilities for m1 = 0 and m1 = 1. 
+ */ /// Approximate the discrete marginal P(m1) using importance sampling -/// Not typically called as expensive, but values are used in the tests. -void approximateDiscreteMarginal( +std::pair approximateDiscreteMarginal( const HybridBayesNet& hbn, const GaussianMixture::shared_ptr& hybridMotionModel, const VectorValues& given, size_t N = 100000) { /// Create importance sampling network q(x0,x1,m) = p(x1|x0,m1) q(x0) P(m1), - /// using q(x0) = N(z0, sigma_Q) to sample x0. + /// using q(x0) = N(z0, sigmaQ) to sample x0. HybridBayesNet q; q.push_back(hybridMotionModel); // Add hybrid motion model q.emplace_shared(GaussianConditional::FromMeanAndStddev( - X(0), given.at(Z(0)), /* sigma_Q = */ 3.0)); // Add proposal q(x0) for x0 + X(0), given.at(Z(0)), /* sigmaQ = */ 3.0)); // Add proposal q(x0) for x0 q.emplace_shared(m1, "50/50"); // Discrete prior. // Do importance sampling @@ -460,6 +471,7 @@ void approximateDiscreteMarginal( double pm1 = w1 / (w0 + w1); std::cout << "p(m0) = " << 100 * (1.0 - pm1) << std::endl; std::cout << "p(m1) = " << 100 * pm1 << std::endl; + return {1.0 - pm1, pm1}; } } // namespace test_two_state_estimation From fcc26e553d394782380955e2075e616bb3e550b6 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Fri, 13 Sep 2024 01:21:39 -0700 Subject: [PATCH 208/398] Remove redundant doc --- gtsam/hybrid/tests/testGaussianMixtureFactor.cpp | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 0ccb6d7bc..dab11039f 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -436,16 +436,6 @@ HybridBayesNet CreateBayesNet( return hbn; } -/** - * @brief Approximates the discrete marginal P(m1) using importance sampling. - * @note Not typically called as expensive, but values are used in the tests. - * - * @param hbn The hybrid Bayesian network. 
- * @param hybridMotionModel The hybrid motion model. - * @param given Observed values for variables. - * @param N Number of samples for importance sampling. - * @return std::pair Probabilities for m1 = 0 and m1 = 1. - */ /// Approximate the discrete marginal P(m1) using importance sampling std::pair approximateDiscreteMarginal( const HybridBayesNet& hbn, From 187935407c27974c0e61e8a6b22eec43d2b622b4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 13 Sep 2024 05:40:00 -0400 Subject: [PATCH 209/398] rename MixtureFactor to HybridNonlinearFactor --- gtsam/hybrid/GaussianMixture.cpp | 8 ++--- gtsam/hybrid/GaussianMixtureFactor.h | 2 +- gtsam/hybrid/HybridFactor.h | 2 +- gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 6 ++-- gtsam/hybrid/MixtureFactor.h | 28 ++++++++--------- gtsam/hybrid/hybrid.i | 10 +++---- gtsam/hybrid/tests/Switching.h | 4 +-- gtsam/hybrid/tests/testHybridBayesNet.cpp | 2 +- gtsam/hybrid/tests/testHybridEstimation.cpp | 10 +++---- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 6 ++-- .../tests/testHybridNonlinearFactorGraph.cpp | 18 +++++------ .../hybrid/tests/testHybridNonlinearISAM.cpp | 6 ++-- gtsam/hybrid/tests/testMixtureFactor.cpp | 30 +++++++++---------- .../tests/test_HybridNonlinearFactorGraph.py | 4 +-- 14 files changed, 68 insertions(+), 68 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index feb49b6a2..f1f1ce429 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -72,7 +72,7 @@ GaussianMixture::GaussianMixture( /* *******************************************************************************/ // TODO(dellaert): This is copy/paste: GaussianMixture should be derived from -// HybridGaussianFactor, no? +// GaussianMixtureFactor, no? 
GaussianFactorGraphTree GaussianMixture::add( const GaussianFactorGraphTree &sum) const { using Y = GaussianFactorGraph; @@ -203,7 +203,7 @@ bool GaussianMixture::allFrontalsGiven(const VectorValues &given) const { } /* ************************************************************************* */ -std::shared_ptr GaussianMixture::likelihood( +std::shared_ptr GaussianMixture::likelihood( const VectorValues &given) const { if (!allFrontalsGiven(given)) { throw std::runtime_error( @@ -212,7 +212,7 @@ std::shared_ptr GaussianMixture::likelihood( const DiscreteKeys discreteParentKeys = discreteKeys(); const KeyVector continuousParentKeys = continuousParents(); - const HybridGaussianFactor::Factors likelihoods( + const GaussianMixtureFactor::Factors likelihoods( conditionals_, [&](const GaussianConditional::shared_ptr &conditional) { const auto likelihood_m = conditional->likelihood(given); const double Cgm_Kgcm = @@ -231,7 +231,7 @@ std::shared_ptr GaussianMixture::likelihood( return std::make_shared(gfg); } }); - return std::make_shared( + return std::make_shared( continuousParentKeys, discreteParentKeys, likelihoods); } diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 467412b1d..9915e24e6 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -146,7 +146,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// Getter for GaussianFactor decision tree const Factors &factors() const { return factors_; } - /// Add MixtureFactor to a Sum, syntactic sugar. + /// Add HybridNonlinearFactor to a Sum, syntactic sugar. 
friend GaussianFactorGraphTree &operator+=( GaussianFactorGraphTree &sum, const HybridGaussianFactor &factor) { sum = factor.add(sum); diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index 4a68f851b..2a572ea65 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -45,7 +45,7 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1, * Base class for *truly* hybrid probabilistic factors * * Examples: - * - MixtureFactor + * - HybridNonlinearFactor * - HybridGaussianFactor * - GaussianMixture * diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index 923058d05..35a8ea355 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -21,7 +21,7 @@ #include #include #include -#include +#include #include namespace gtsam { @@ -59,7 +59,7 @@ void HybridNonlinearFactorGraph::printErrors( // Clear the stringstream ss.str(std::string()); - if (auto mf = std::dynamic_pointer_cast(factor)) { + if (auto mf = std::dynamic_pointer_cast(factor)) { if (factor == nullptr) { std::cout << "nullptr" << "\n"; @@ -151,7 +151,7 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize( continue; } // Check if it is a nonlinear mixture factor - if (auto mf = dynamic_pointer_cast(f)) { + if (auto mf = dynamic_pointer_cast(f)) { const HybridGaussianFactor::shared_ptr& gmf = mf->linearize(continuousValues); linearFG->push_back(gmf); diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/MixtureFactor.h index d2e0e7822..d3d673dda 100644 --- a/gtsam/hybrid/MixtureFactor.h +++ b/gtsam/hybrid/MixtureFactor.h @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file MixtureFactor.h + * @file HybridNonlinearFactor.h * @brief Nonlinear Mixture factor of continuous and discrete. 
* @author Kevin Doherty, kdoherty@mit.edu * @author Varun Agrawal @@ -44,11 +44,11 @@ namespace gtsam { * one of (NonlinearFactor, GaussianFactor) which can then be checked to perform * the correct operation. */ -class MixtureFactor : public HybridFactor { +class HybridNonlinearFactor : public HybridFactor { public: using Base = HybridFactor; - using This = MixtureFactor; - using shared_ptr = std::shared_ptr; + using This = HybridNonlinearFactor; + using shared_ptr = std::shared_ptr; using sharedFactor = std::shared_ptr; /** @@ -63,7 +63,7 @@ class MixtureFactor : public HybridFactor { bool normalized_; public: - MixtureFactor() = default; + HybridNonlinearFactor() = default; /** * @brief Construct from Decision tree. @@ -74,7 +74,7 @@ class MixtureFactor : public HybridFactor { * @param normalized Flag indicating if the factor error is already * normalized. */ - MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, + HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, const Factors& factors, bool normalized = false) : Base(keys, discreteKeys), factors_(factors), normalized_(normalized) {} @@ -95,7 +95,7 @@ class MixtureFactor : public HybridFactor { * normalized. */ template - MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, + HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, const std::vector>& factors, bool normalized = false) : Base(keys, discreteKeys), normalized_(normalized) { @@ -111,7 +111,7 @@ class MixtureFactor : public HybridFactor { nonlinear_factors.push_back(nf); } else { throw std::runtime_error( - "Factors passed into MixtureFactor need to be nonlinear!"); + "Factors passed into HybridNonlinearFactor need to be nonlinear!"); } } factors_ = Factors(discreteKeys, nonlinear_factors); @@ -124,7 +124,7 @@ class MixtureFactor : public HybridFactor { } /** - * @brief Compute error of the MixtureFactor as a tree. 
+ * @brief Compute error of the HybridNonlinearFactor as a tree. * * @param continuousValues The continuous values for which to compute the * error. @@ -201,15 +201,15 @@ class MixtureFactor : public HybridFactor { /// Check equality bool equals(const HybridFactor& other, double tol = 1e-9) const override { - // We attempt a dynamic cast from HybridFactor to MixtureFactor. If it + // We attempt a dynamic cast from HybridFactor to HybridNonlinearFactor. If it // fails, return false. - if (!dynamic_cast(&other)) return false; + if (!dynamic_cast(&other)) return false; - // If the cast is successful, we'll properly construct a MixtureFactor + // If the cast is successful, we'll properly construct a HybridNonlinearFactor // object from `other` - const MixtureFactor& f(static_cast(other)); + const HybridNonlinearFactor& f(static_cast(other)); - // Ensure that this MixtureFactor and `f` have the same `factors_`. + // Ensure that this HybridNonlinearFactor and `f` have the same `factors_`. auto compare = [tol](const sharedFactor& a, const sharedFactor& b) { return traits::Equals(*a, *b, tol); }; diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 0c72d5046..e5be00c6f 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -235,15 +235,15 @@ class HybridNonlinearFactorGraph { gtsam::DefaultKeyFormatter) const; }; -#include -class MixtureFactor : gtsam::HybridFactor { - MixtureFactor( +#include +class HybridNonlinearFactor : gtsam::HybridFactor { + HybridNonlinearFactor( const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, const gtsam::DecisionTree& factors, bool normalized = false); template - MixtureFactor(const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, + HybridNonlinearFactor(const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, const std::vector& factors, bool normalized = false); @@ -256,7 +256,7 @@ class MixtureFactor : gtsam::HybridFactor { HybridGaussianFactor* linearize( const 
gtsam::Values& continuousValues) const; - void print(string s = "MixtureFactor\n", + void print(string s = "HybridNonlinearFactor\n", const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const; }; diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 126b15c6e..0e8680aa9 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -22,7 +22,7 @@ #include #include #include -#include +#include #include #include #include @@ -163,7 +163,7 @@ struct Switching { for (auto &&f : motion_models) { components.push_back(std::dynamic_pointer_cast(f)); } - nonlinearFactorGraph.emplace_shared( + nonlinearFactorGraph.emplace_shared( keys, DiscreteKeys{modes[k]}, components); } diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 3a7e008d8..9b7ba9744 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -389,7 +389,7 @@ TEST(HybridBayesNet, Sampling) { std::make_shared>(X(0), X(1), 1, noise_model); std::vector factors = {zero_motion, one_motion}; nfg.emplace_shared>(X(0), 0.0, noise_model); - nfg.emplace_shared( + nfg.emplace_shared( KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, factors); DiscreteKey mode(M(0), 2); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 2ec9c514a..295633916 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -22,7 +22,7 @@ #include #include #include -#include +#include #include #include #include @@ -435,7 +435,7 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() { std::make_shared>(X(0), X(1), 0, noise_model); const auto one_motion = std::make_shared>(X(0), X(1), 1, noise_model); - nfg.emplace_shared( + nfg.emplace_shared( KeyVector{X(0), X(1)}, DiscreteKeys{m}, std::vector{zero_motion, one_motion}); @@ -532,7 +532,7 @@ 
TEST(HybridEstimation, CorrectnessViaSampling) { * the difference in noise models. */ std::shared_ptr mixedVarianceFactor( - const MixtureFactor& mf, const Values& initial, const Key& mode, + const HybridNonlinearFactor& mf, const Values& initial, const Key& mode, double noise_tight, double noise_loose, size_t d, size_t tight_index) { HybridGaussianFactor::shared_ptr gmf = mf.linearize(initial); @@ -592,7 +592,7 @@ TEST(HybridEstimation, ModeSelection) { std::vector components = {model0, model1}; KeyVector keys = {X(0), X(1)}; - MixtureFactor mf(keys, modes, components); + HybridNonlinearFactor mf(keys, modes, components); initial.insert(X(0), 0.0); initial.insert(X(1), 0.0); @@ -683,7 +683,7 @@ TEST(HybridEstimation, ModeSelection2) { std::vector components = {model0, model1}; KeyVector keys = {X(0), X(1)}; - MixtureFactor mf(keys, modes, components); + HybridNonlinearFactor mf(keys, modes, components); initial.insert(X(0), Z_3x1); initial.insert(X(1), Z_3x1); diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index 249cbf9c3..d2b0fded3 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -418,7 +418,7 @@ TEST(HybridGaussianISAM, NonTrivial) { moving = std::make_shared(W(0), W(1), odometry, noise_model); std::vector components = {moving, still}; - auto mixtureFactor = std::make_shared( + auto mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); fg.push_back(mixtureFactor); @@ -458,7 +458,7 @@ TEST(HybridGaussianISAM, NonTrivial) { moving = std::make_shared(W(1), W(2), odometry, noise_model); components = {moving, still}; - mixtureFactor = std::make_shared( + mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); fg.push_back(mixtureFactor); @@ -501,7 +501,7 @@ TEST(HybridGaussianISAM, NonTrivial) { moving = std::make_shared(W(2), W(3), odometry, noise_model); 
components = {moving, still}; - mixtureFactor = std::make_shared( + mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); fg.push_back(mixtureFactor); diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 67b820c33..9e2816865 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -24,7 +24,7 @@ #include #include #include -#include +#include #include #include #include @@ -105,7 +105,7 @@ TEST(HybridNonlinearFactorGraph, Resize) { auto discreteFactor = std::make_shared(); fg.push_back(discreteFactor); - auto dcFactor = std::make_shared(); + auto dcFactor = std::make_shared(); fg.push_back(dcFactor); EXPECT_LONGS_EQUAL(fg.size(), 3); @@ -132,7 +132,7 @@ TEST(HybridGaussianFactorGraph, Resize) { moving = std::make_shared(X(0), X(1), 1.0, noise_model); std::vector components = {still, moving}; - auto dcFactor = std::make_shared( + auto dcFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); nhfg.push_back(dcFactor); @@ -150,10 +150,10 @@ TEST(HybridGaussianFactorGraph, Resize) { } /*************************************************************************** - * Test that the MixtureFactor reports correctly if the number of continuous + * Test that the HybridNonlinearFactor reports correctly if the number of continuous * keys provided do not match the keys in the factors. */ -TEST(HybridGaussianFactorGraph, MixtureFactor) { +TEST(HybridGaussianFactorGraph, HybridNonlinearFactor) { auto nonlinearFactor = std::make_shared>( X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1)); auto discreteFactor = std::make_shared(); @@ -166,12 +166,12 @@ TEST(HybridGaussianFactorGraph, MixtureFactor) { // Check for exception when number of continuous keys are under-specified. 
KeyVector contKeys = {X(0)}; - THROWS_EXCEPTION(std::make_shared( + THROWS_EXCEPTION(std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components)); // Check for exception when number of continuous keys are too many. contKeys = {X(0), X(1), X(2)}; - THROWS_EXCEPTION(std::make_shared( + THROWS_EXCEPTION(std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components)); } @@ -195,7 +195,7 @@ TEST(HybridFactorGraph, PushBack) { fg = HybridNonlinearFactorGraph(); - auto dcFactor = std::make_shared(); + auto dcFactor = std::make_shared(); fg.push_back(dcFactor); EXPECT_LONGS_EQUAL(fg.size(), 1); @@ -800,7 +800,7 @@ TEST(HybridFactorGraph, DefaultDecisionTree) { moving = std::make_shared(X(0), X(1), odometry, noise_model); std::vector motion_models = {still, moving}; - fg.emplace_shared( + fg.emplace_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, motion_models); // Add Range-Bearing measurements to from X0 to L0 and X1 to L1. diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index 2fb6fd8ba..e2ed4cbad 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -437,7 +437,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { moving = std::make_shared(W(0), W(1), odometry, noise_model); std::vector components = {moving, still}; - auto mixtureFactor = std::make_shared( + auto mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); fg.push_back(mixtureFactor); @@ -477,7 +477,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { moving = std::make_shared(W(1), W(2), odometry, noise_model); components = {moving, still}; - mixtureFactor = std::make_shared( + mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); fg.push_back(mixtureFactor); @@ -520,7 +520,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { moving = std::make_shared(W(2), W(3), 
odometry, noise_model); components = {moving, still}; - mixtureFactor = std::make_shared( + mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); fg.push_back(mixtureFactor); diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index a58a4767f..dead8cd34 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -11,7 +11,7 @@ /** * @file testMixtureFactor.cpp - * @brief Unit tests for MixtureFactor + * @brief Unit tests for HybridNonlinearFactor * @author Varun Agrawal * @date October 2022 */ @@ -21,7 +21,7 @@ #include #include #include -#include +#include #include #include @@ -36,17 +36,17 @@ using symbol_shorthand::X; /* ************************************************************************* */ // Check iterators of empty mixture. -TEST(MixtureFactor, Constructor) { - MixtureFactor factor; - MixtureFactor::const_iterator const_it = factor.begin(); +TEST(HybridNonlinearFactor, Constructor) { + HybridNonlinearFactor factor; + HybridNonlinearFactor::const_iterator const_it = factor.begin(); CHECK(const_it == factor.end()); - MixtureFactor::iterator it = factor.begin(); + HybridNonlinearFactor::iterator it = factor.begin(); CHECK(it == factor.end()); } /* ************************************************************************* */ // Test .print() output. 
-TEST(MixtureFactor, Printing) { +TEST(HybridNonlinearFactor, Printing) { DiscreteKey m1(1, 2); double between0 = 0.0; double between1 = 1.0; @@ -60,11 +60,11 @@ TEST(MixtureFactor, Printing) { std::make_shared>(X(1), X(2), between1, model); std::vector factors{f0, f1}; - MixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + HybridNonlinearFactor mixtureFactor({X(1), X(2)}, {m1}, factors); std::string expected = R"(Hybrid [x1 x2; 1] -MixtureFactor +HybridNonlinearFactor Choice(1) 0 Leaf Nonlinear factor on 2 keys 1 Leaf Nonlinear factor on 2 keys @@ -73,7 +73,7 @@ MixtureFactor } /* ************************************************************************* */ -static MixtureFactor getMixtureFactor() { +static HybridNonlinearFactor getMixtureFactor() { DiscreteKey m1(1, 2); double between0 = 0.0; @@ -88,12 +88,12 @@ static MixtureFactor getMixtureFactor() { std::make_shared>(X(1), X(2), between1, model); std::vector factors{f0, f1}; - return MixtureFactor({X(1), X(2)}, {m1}, factors); + return HybridNonlinearFactor({X(1), X(2)}, {m1}, factors); } /* ************************************************************************* */ -// Test the error of the MixtureFactor -TEST(MixtureFactor, Error) { +// Test the error of the HybridNonlinearFactor +TEST(HybridNonlinearFactor, Error) { auto mixtureFactor = getMixtureFactor(); Values continuousValues; @@ -112,8 +112,8 @@ TEST(MixtureFactor, Error) { } /* ************************************************************************* */ -// Test dim of the MixtureFactor -TEST(MixtureFactor, Dim) { +// Test dim of the HybridNonlinearFactor +TEST(HybridNonlinearFactor, Dim) { auto mixtureFactor = getMixtureFactor(); EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); } diff --git a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py index ed26a0c81..3c2a70f73 100644 --- a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py +++ 
b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py @@ -17,9 +17,9 @@ import unittest import numpy as np from gtsam.symbol_shorthand import C, X from gtsam.utils.test_case import GtsamTestCase -from gtsam import BetweenFactorPoint3, noiseModel, PriorFactorPoint3, Point3 import gtsam +from gtsam import BetweenFactorPoint3, Point3, PriorFactorPoint3, noiseModel class TestHybridGaussianFactorGraph(GtsamTestCase): @@ -34,7 +34,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): PriorFactorPoint3(2, Point3(1, 2, 3), noiseModel.Diagonal.Variances([0.5, 0.5, 0.5]))) nlfg.push_back( - gtsam.MixtureFactor([1], dk, [ + gtsam.HybridNonlinearFactor([1], dk, [ PriorFactorPoint3(1, Point3(0, 0, 0), noiseModel.Unit.Create(3)), PriorFactorPoint3(1, Point3(1, 2, 1), From aef273bce8299a561b5b9829125688116076bfc4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 13 Sep 2024 05:41:24 -0400 Subject: [PATCH 210/398] rename GaussianMixture to HybridGaussianConditional --- doc/Hybrid.lyx | 12 ++-- gtsam/hybrid/GaussianMixture.cpp | 58 +++++++++---------- gtsam/hybrid/GaussianMixture.h | 32 +++++----- gtsam/hybrid/HybridBayesNet.cpp | 8 +-- gtsam/hybrid/HybridBayesNet.h | 4 +- gtsam/hybrid/HybridConditional.cpp | 2 +- gtsam/hybrid/HybridConditional.h | 18 +++--- gtsam/hybrid/HybridFactor.h | 2 +- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 14 ++--- gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 6 +- gtsam/hybrid/HybridSmoother.cpp | 2 +- gtsam/hybrid/HybridSmoother.h | 2 +- gtsam/hybrid/hybrid.i | 12 ++-- gtsam/hybrid/tests/TinyHybridExample.h | 2 +- gtsam/hybrid/tests/testGaussianMixture.cpp | 28 ++++----- .../tests/testGaussianMixtureFactor.cpp | 12 ++-- gtsam/hybrid/tests/testHybridBayesNet.cpp | 4 +- gtsam/hybrid/tests/testHybridConditional.cpp | 4 +- gtsam/hybrid/tests/testHybridEstimation.cpp | 4 +- .../tests/testHybridGaussianFactorGraph.cpp | 20 +++---- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 16 ++--- .../tests/testHybridNonlinearFactorGraph.cpp | 2 +- 
.../hybrid/tests/testHybridNonlinearISAM.cpp | 16 ++--- .../hybrid/tests/testSerializationHybrid.cpp | 22 +++---- gtsam/inference/Conditional.h | 2 +- python/gtsam/tests/test_HybridBayesNet.py | 7 ++- python/gtsam/tests/test_HybridFactorGraph.py | 10 ++-- 27 files changed, 161 insertions(+), 160 deletions(-) diff --git a/doc/Hybrid.lyx b/doc/Hybrid.lyx index b5901d495..17d7094e1 100644 --- a/doc/Hybrid.lyx +++ b/doc/Hybrid.lyx @@ -191,13 +191,13 @@ E_{gc}(x,y)=\frac{1}{2}\|Rx+Sy-d\|_{\Sigma}^{2}.\label{eq:gc_error} \end_layout \begin_layout Subsubsection* -GaussianMixture +HybridGaussianConditional \end_layout \begin_layout Standard A \emph on -GaussianMixture +HybridGaussianConditional \emph default (maybe to be renamed to \emph on @@ -233,7 +233,7 @@ GaussianConditional to a set of discrete variables. As \emph on -GaussianMixture +HybridGaussianConditional \emph default is a \emph on @@ -324,7 +324,7 @@ The key point here is that \color inherit is the log-normalization constant for the complete \emph on -GaussianMixture +HybridGaussianConditional \emph default across all values of \begin_inset Formula $m$ @@ -556,7 +556,7 @@ Analogously, a \emph on HybridGaussianFactor \emph default - typically results from a GaussianMixture by having known values + typically results from a HybridGaussianConditional by having known values \begin_inset Formula $\bar{x}$ \end_inset @@ -817,7 +817,7 @@ E_{mf}(y,m)=\frac{1}{2}\|A_{m}y-b_{m}\|_{\Sigma_{mfm}}^{2}=E_{gcm}(\bar{x},y)+K_ \end_inset -which is identical to the GaussianMixture error +which is identical to the HybridGaussianConditional error \begin_inset CommandInset ref LatexCommand eqref reference "eq:gm_error" diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index f1f1ce429..451493ae0 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file 
GaussianMixture.cpp + * @file HybridGaussianConditional.cpp * @brief A hybrid conditional in the Conditional Linear Gaussian scheme * @author Fan Jiang * @author Varun Agrawal @@ -20,7 +20,7 @@ #include #include -#include +#include #include #include #include @@ -29,10 +29,10 @@ namespace gtsam { -GaussianMixture::GaussianMixture( +HybridGaussianConditional::HybridGaussianConditional( const KeyVector &continuousFrontals, const KeyVector &continuousParents, const DiscreteKeys &discreteParents, - const GaussianMixture::Conditionals &conditionals) + const HybridGaussianConditional::Conditionals &conditionals) : BaseFactor(CollectKeys(continuousFrontals, continuousParents), discreteParents), BaseConditional(continuousFrontals.size()), @@ -50,30 +50,30 @@ GaussianMixture::GaussianMixture( } /* *******************************************************************************/ -const GaussianMixture::Conditionals &GaussianMixture::conditionals() const { +const HybridGaussianConditional::Conditionals &HybridGaussianConditional::conditionals() const { return conditionals_; } /* *******************************************************************************/ -GaussianMixture::GaussianMixture( +HybridGaussianConditional::HybridGaussianConditional( KeyVector &&continuousFrontals, KeyVector &&continuousParents, DiscreteKeys &&discreteParents, std::vector &&conditionals) - : GaussianMixture(continuousFrontals, continuousParents, discreteParents, + : HybridGaussianConditional(continuousFrontals, continuousParents, discreteParents, Conditionals(discreteParents, conditionals)) {} /* *******************************************************************************/ -GaussianMixture::GaussianMixture( +HybridGaussianConditional::HybridGaussianConditional( const KeyVector &continuousFrontals, const KeyVector &continuousParents, const DiscreteKeys &discreteParents, const std::vector &conditionals) - : GaussianMixture(continuousFrontals, continuousParents, discreteParents, + : 
HybridGaussianConditional(continuousFrontals, continuousParents, discreteParents, Conditionals(discreteParents, conditionals)) {} /* *******************************************************************************/ -// TODO(dellaert): This is copy/paste: GaussianMixture should be derived from +// TODO(dellaert): This is copy/paste: HybridGaussianConditional should be derived from // GaussianMixtureFactor, no? -GaussianFactorGraphTree GaussianMixture::add( +GaussianFactorGraphTree HybridGaussianConditional::add( const GaussianFactorGraphTree &sum) const { using Y = GaussianFactorGraph; auto add = [](const Y &graph1, const Y &graph2) { @@ -86,7 +86,7 @@ GaussianFactorGraphTree GaussianMixture::add( } /* *******************************************************************************/ -GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { +GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree() const { auto wrap = [this](const GaussianConditional::shared_ptr &gc) { // First check if conditional has not been pruned if (gc) { @@ -109,7 +109,7 @@ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { } /* *******************************************************************************/ -size_t GaussianMixture::nrComponents() const { +size_t HybridGaussianConditional::nrComponents() const { size_t total = 0; conditionals_.visit([&total](const GaussianFactor::shared_ptr &node) { if (node) total += 1; @@ -118,7 +118,7 @@ size_t GaussianMixture::nrComponents() const { } /* *******************************************************************************/ -GaussianConditional::shared_ptr GaussianMixture::operator()( +GaussianConditional::shared_ptr HybridGaussianConditional::operator()( const DiscreteValues &discreteValues) const { auto &ptr = conditionals_(discreteValues); if (!ptr) return nullptr; @@ -127,11 +127,11 @@ GaussianConditional::shared_ptr GaussianMixture::operator()( return conditional; else throw 
std::logic_error( - "A GaussianMixture unexpectedly contained a non-conditional"); + "A HybridGaussianConditional unexpectedly contained a non-conditional"); } /* *******************************************************************************/ -bool GaussianMixture::equals(const HybridFactor &lf, double tol) const { +bool HybridGaussianConditional::equals(const HybridFactor &lf, double tol) const { const This *e = dynamic_cast(&lf); if (e == nullptr) return false; @@ -149,7 +149,7 @@ bool GaussianMixture::equals(const HybridFactor &lf, double tol) const { } /* *******************************************************************************/ -void GaussianMixture::print(const std::string &s, +void HybridGaussianConditional::print(const std::string &s, const KeyFormatter &formatter) const { std::cout << (s.empty() ? "" : s + "\n"); if (isContinuous()) std::cout << "Continuous "; @@ -177,7 +177,7 @@ void GaussianMixture::print(const std::string &s, } /* ************************************************************************* */ -KeyVector GaussianMixture::continuousParents() const { +KeyVector HybridGaussianConditional::continuousParents() const { // Get all parent keys: const auto range = parents(); KeyVector continuousParentKeys(range.begin(), range.end()); @@ -193,7 +193,7 @@ KeyVector GaussianMixture::continuousParents() const { } /* ************************************************************************* */ -bool GaussianMixture::allFrontalsGiven(const VectorValues &given) const { +bool HybridGaussianConditional::allFrontalsGiven(const VectorValues &given) const { for (auto &&kv : given) { if (given.find(kv.first) == given.end()) { return false; @@ -203,11 +203,11 @@ bool GaussianMixture::allFrontalsGiven(const VectorValues &given) const { } /* ************************************************************************* */ -std::shared_ptr GaussianMixture::likelihood( +std::shared_ptr HybridGaussianConditional::likelihood( const VectorValues &given) const { if 
(!allFrontalsGiven(given)) { throw std::runtime_error( - "GaussianMixture::likelihood: given values are missing some frontals."); + "HybridGaussianConditional::likelihood: given values are missing some frontals."); } const DiscreteKeys discreteParentKeys = discreteKeys(); @@ -252,7 +252,7 @@ std::set DiscreteKeysAsSet(const DiscreteKeys &discreteKeys) { */ std::function &, const GaussianConditional::shared_ptr &)> -GaussianMixture::prunerFunc(const DecisionTreeFactor &discreteProbs) { +HybridGaussianConditional::prunerFunc(const DecisionTreeFactor &discreteProbs) { // Get the discrete keys as sets for the decision tree // and the gaussian mixture. auto discreteProbsKeySet = DiscreteKeysAsSet(discreteProbs.discreteKeys()); @@ -303,7 +303,7 @@ GaussianMixture::prunerFunc(const DecisionTreeFactor &discreteProbs) { } /* *******************************************************************************/ -void GaussianMixture::prune(const DecisionTreeFactor &discreteProbs) { +void HybridGaussianConditional::prune(const DecisionTreeFactor &discreteProbs) { // Functional which loops over all assignments and create a set of // GaussianConditionals auto pruner = prunerFunc(discreteProbs); @@ -313,7 +313,7 @@ void GaussianMixture::prune(const DecisionTreeFactor &discreteProbs) { } /* *******************************************************************************/ -AlgebraicDecisionTree GaussianMixture::logProbability( +AlgebraicDecisionTree HybridGaussianConditional::logProbability( const VectorValues &continuousValues) const { // functor to calculate (double) logProbability value from // GaussianConditional. 
@@ -331,7 +331,7 @@ AlgebraicDecisionTree GaussianMixture::logProbability( } /* ************************************************************************* */ -double GaussianMixture::conditionalError( +double HybridGaussianConditional::conditionalError( const GaussianConditional::shared_ptr &conditional, const VectorValues &continuousValues) const { // Check if valid pointer @@ -348,7 +348,7 @@ double GaussianMixture::conditionalError( } /* *******************************************************************************/ -AlgebraicDecisionTree GaussianMixture::errorTree( +AlgebraicDecisionTree HybridGaussianConditional::errorTree( const VectorValues &continuousValues) const { auto errorFunc = [&](const GaussianConditional::shared_ptr &conditional) { return conditionalError(conditional, continuousValues); @@ -358,20 +358,20 @@ AlgebraicDecisionTree GaussianMixture::errorTree( } /* *******************************************************************************/ -double GaussianMixture::error(const HybridValues &values) const { +double HybridGaussianConditional::error(const HybridValues &values) const { // Directly index to get the conditional, no need to build the whole tree. 
auto conditional = conditionals_(values.discrete()); return conditionalError(conditional, values.continuous()); } /* *******************************************************************************/ -double GaussianMixture::logProbability(const HybridValues &values) const { +double HybridGaussianConditional::logProbability(const HybridValues &values) const { auto conditional = conditionals_(values.discrete()); return conditional->logProbability(values.continuous()); } /* *******************************************************************************/ -double GaussianMixture::evaluate(const HybridValues &values) const { +double HybridGaussianConditional::evaluate(const HybridValues &values) const { auto conditional = conditionals_(values.discrete()); return conditional->evaluate(values.continuous()); } diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index 10feaf55b..87ddb2cb8 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file GaussianMixture.h + * @file HybridGaussianConditional.h * @brief A hybrid conditional in the Conditional Linear Gaussian scheme * @author Fan Jiang * @author Varun Agrawal @@ -50,14 +50,14 @@ class HybridValues; * * @ingroup hybrid */ -class GTSAM_EXPORT GaussianMixture +class GTSAM_EXPORT HybridGaussianConditional : public HybridFactor, - public Conditional { + public Conditional { public: - using This = GaussianMixture; - using shared_ptr = std::shared_ptr; + using This = HybridGaussianConditional; + using shared_ptr = std::shared_ptr; using BaseFactor = HybridFactor; - using BaseConditional = Conditional; + using BaseConditional = Conditional; /// typedef for Decision Tree of Gaussian Conditionals using Conditionals = DecisionTree; @@ -67,7 +67,7 @@ class GTSAM_EXPORT GaussianMixture double logConstant_; ///< log of the normalization constant. 
/** - * @brief Convert a GaussianMixture of conditionals into + * @brief Convert a HybridGaussianConditional of conditionals into * a DecisionTree of Gaussian factor graphs. */ GaussianFactorGraphTree asGaussianFactorGraphTree() const; @@ -88,10 +88,10 @@ class GTSAM_EXPORT GaussianMixture /// @{ /// Default constructor, mainly for serialization. - GaussianMixture() = default; + HybridGaussianConditional() = default; /** - * @brief Construct a new GaussianMixture object. + * @brief Construct a new HybridGaussianConditional object. * * @param continuousFrontals the continuous frontals. * @param continuousParents the continuous parents. @@ -101,7 +101,7 @@ class GTSAM_EXPORT GaussianMixture * cardinality of the DiscreteKeys in discreteParents, since the * discreteParents will be used as the labels in the decision tree. */ - GaussianMixture(const KeyVector &continuousFrontals, + HybridGaussianConditional(const KeyVector &continuousFrontals, const KeyVector &continuousParents, const DiscreteKeys &discreteParents, const Conditionals &conditionals); @@ -114,7 +114,7 @@ class GTSAM_EXPORT GaussianMixture * @param discreteParents Discrete parents variables * @param conditionals List of conditionals */ - GaussianMixture(KeyVector &&continuousFrontals, KeyVector &&continuousParents, + HybridGaussianConditional(KeyVector &&continuousFrontals, KeyVector &&continuousParents, DiscreteKeys &&discreteParents, std::vector &&conditionals); @@ -126,7 +126,7 @@ class GTSAM_EXPORT GaussianMixture * @param discreteParents Discrete parents variables * @param conditionals List of conditionals */ - GaussianMixture( + HybridGaussianConditional( const KeyVector &continuousFrontals, const KeyVector &continuousParents, const DiscreteKeys &discreteParents, const std::vector &conditionals); @@ -140,7 +140,7 @@ class GTSAM_EXPORT GaussianMixture /// Print utility void print( - const std::string &s = "GaussianMixture\n", + const std::string &s = "HybridGaussianConditional\n", const KeyFormatter 
&formatter = DefaultKeyFormatter) const override; /// @} @@ -172,7 +172,7 @@ class GTSAM_EXPORT GaussianMixture const Conditionals &conditionals() const; /** - * @brief Compute logProbability of the GaussianMixture as a tree. + * @brief Compute logProbability of the HybridGaussianConditional as a tree. * * @param continuousValues The continuous VectorValues. * @return AlgebraicDecisionTree A decision tree with the same keys @@ -209,7 +209,7 @@ class GTSAM_EXPORT GaussianMixture double error(const HybridValues &values) const override; /** - * @brief Compute error of the GaussianMixture as a tree. + * @brief Compute error of the HybridGaussianConditional as a tree. * * @param continuousValues The continuous VectorValues. * @return AlgebraicDecisionTree A decision tree on the discrete keys @@ -277,6 +277,6 @@ std::set DiscreteKeysAsSet(const DiscreteKeys &discreteKeys); // traits template <> -struct traits : public Testable {}; +struct traits : public Testable {}; } // namespace gtsam diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 1d01baed2..90aa6cde2 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -168,11 +168,11 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) { DecisionTreeFactor prunedDiscreteProbs = this->pruneDiscreteConditionals(maxNrLeaves); - /* To prune, we visitWith every leaf in the GaussianMixture. + /* To prune, we visitWith every leaf in the HybridGaussianConditional. * For each leaf, using the assignment we can check the discrete decision tree * for 0.0 probability, then just set the leaf to a nullptr. * - * We can later check the GaussianMixture for just nullptrs. + * We can later check the HybridGaussianConditional for just nullptrs. 
*/ HybridBayesNet prunedBayesNetFragment; @@ -182,14 +182,14 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) { for (auto &&conditional : *this) { if (auto gm = conditional->asMixture()) { // Make a copy of the Gaussian mixture and prune it! - auto prunedGaussianMixture = std::make_shared(*gm); + auto prunedGaussianMixture = std::make_shared(*gm); prunedGaussianMixture->prune(prunedDiscreteProbs); // imperative :-( // Type-erase and add to the pruned Bayes Net fragment. prunedBayesNetFragment.push_back(prunedGaussianMixture); } else { - // Add the non-GaussianMixture conditional + // Add the non-HybridGaussianConditional conditional prunedBayesNetFragment.push_back(conditional); } } diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 8fae4061d..5c453c106 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -79,7 +79,7 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { * * Example: * auto shared_ptr_to_a_conditional = - * std::make_shared(...); + * std::make_shared(...); * hbn.push_back(shared_ptr_to_a_conditional); */ void push_back(HybridConditional &&conditional) { @@ -106,7 +106,7 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { * Preferred: Emplace a conditional directly using arguments. 
* * Examples: - * hbn.emplace_shared(...))); + * hbn.emplace_shared(...))); * hbn.emplace_shared(...))); * hbn.emplace_shared(...))); */ diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index 6c4893476..5ef74ea7d 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -55,7 +55,7 @@ HybridConditional::HybridConditional( /* ************************************************************************ */ HybridConditional::HybridConditional( - const std::shared_ptr &gaussianMixture) + const std::shared_ptr &gaussianMixture) : BaseFactor(KeyVector(gaussianMixture->keys().begin(), gaussianMixture->keys().begin() + gaussianMixture->nrContinuous()), diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index fb6542822..d02b69127 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -18,7 +18,7 @@ #pragma once #include -#include +#include #include #include #include @@ -39,7 +39,7 @@ namespace gtsam { * As a type-erased variant of: * - DiscreteConditional * - GaussianConditional - * - GaussianMixture + * - HybridGaussianConditional * * The reason why this is important is that `Conditional` is a CRTP class. * CRTP is static polymorphism such that all CRTP classes, while bearing the @@ -127,7 +127,7 @@ class GTSAM_EXPORT HybridConditional * @param gaussianMixture Gaussian Mixture Conditional used to create the * HybridConditional. 
*/ - HybridConditional(const std::shared_ptr& gaussianMixture); + HybridConditional(const std::shared_ptr& gaussianMixture); /// @} /// @name Testable @@ -146,12 +146,12 @@ class GTSAM_EXPORT HybridConditional /// @{ /** - * @brief Return HybridConditional as a GaussianMixture + * @brief Return HybridConditional as a HybridGaussianConditional * @return nullptr if not a mixture - * @return GaussianMixture::shared_ptr otherwise + * @return HybridGaussianConditional::shared_ptr otherwise */ - GaussianMixture::shared_ptr asMixture() const { - return std::dynamic_pointer_cast(inner_); + HybridGaussianConditional::shared_ptr asMixture() const { + return std::dynamic_pointer_cast(inner_); } /** @@ -222,8 +222,8 @@ class GTSAM_EXPORT HybridConditional boost::serialization::void_cast_register( static_cast(NULL), static_cast(NULL)); } else { - boost::serialization::void_cast_register( - static_cast(NULL), static_cast(NULL)); + boost::serialization::void_cast_register( + static_cast(NULL), static_cast(NULL)); } } #endif diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index 2a572ea65..c66116512 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -47,7 +47,7 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1, * Examples: * - HybridNonlinearFactor * - HybridGaussianFactor - * - GaussianMixture + * - HybridGaussianConditional * * @ingroup hybrid */ diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 24669863e..656060ae6 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -23,7 +23,7 @@ #include #include #include -#include +#include #include #include #include @@ -180,7 +180,7 @@ GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const { result = addGaussian(result, gf); } else if (auto gmf = dynamic_pointer_cast(f)) { result = gmf->add(result); - } else if (auto gm = dynamic_pointer_cast(f)) { 
+ } else if (auto gm = dynamic_pointer_cast(f)) { result = gm->add(result); } else if (auto hc = dynamic_pointer_cast(f)) { if (auto gm = hc->asMixture()) { @@ -408,10 +408,10 @@ hybridElimination(const HybridGaussianFactorGraph &factors, : createGaussianMixtureFactor(eliminationResults, continuousSeparator, discreteSeparator); - // Create the GaussianMixture from the conditionals - GaussianMixture::Conditionals conditionals( + // Create the HybridGaussianConditional from the conditionals + HybridGaussianConditional::Conditionals conditionals( eliminationResults, [](const Result &pair) { return pair.first; }); - auto gaussianMixture = std::make_shared( + auto gaussianMixture = std::make_shared( frontalKeys, continuousSeparator, discreteSeparator, conditionals); return {std::make_shared(gaussianMixture), newFactor}; @@ -458,7 +458,7 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, // Because of all these reasons, we carefully consider how to // implement the hybrid factors so that we do not get poor performance. - // The first thing is how to represent the GaussianMixture. + // The first thing is how to represent the HybridGaussianConditional. // A very possible scenario is that the incoming factors will have different // levels of discrete keys. For example, imagine we are going to eliminate the // fragment: $\phi(x1,c1,c2)$, $\phi(x1,c2,c3)$, which is perfectly valid. 
@@ -599,7 +599,7 @@ GaussianFactorGraph HybridGaussianFactorGraph::operator()( gfg.push_back(gf); } else if (auto gmf = std::dynamic_pointer_cast(f)) { gfg.push_back((*gmf)(assignment)); - } else if (auto gm = dynamic_pointer_cast(f)) { + } else if (auto gm = dynamic_pointer_cast(f)) { gfg.push_back((*gm)(assignment)); } else { continue; diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index 35a8ea355..76703ad2d 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -18,7 +18,7 @@ #include #include -#include +#include #include #include #include @@ -80,7 +80,7 @@ void HybridNonlinearFactorGraph::printErrors( gmf->errorTree(values.continuous()).print("", keyFormatter); std::cout << std::endl; } - } else if (auto gm = std::dynamic_pointer_cast(factor)) { + } else if (auto gm = std::dynamic_pointer_cast(factor)) { if (factor == nullptr) { std::cout << "nullptr" << "\n"; @@ -163,7 +163,7 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize( linearFG->push_back(f); } else if (auto gmf = dynamic_pointer_cast(f)) { linearFG->push_back(gmf); - } else if (auto gm = dynamic_pointer_cast(f)) { + } else if (auto gm = dynamic_pointer_cast(f)) { linearFG->push_back(gm); } else if (dynamic_pointer_cast(f)) { linearFG->push_back(f); diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp index afa8340d2..546837bf9 100644 --- a/gtsam/hybrid/HybridSmoother.cpp +++ b/gtsam/hybrid/HybridSmoother.cpp @@ -138,7 +138,7 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph, } /* ************************************************************************* */ -GaussianMixture::shared_ptr HybridSmoother::gaussianMixture( +HybridGaussianConditional::shared_ptr HybridSmoother::gaussianMixture( size_t index) const { return hybridBayesNet_.at(index)->asMixture(); } diff --git a/gtsam/hybrid/HybridSmoother.h 
b/gtsam/hybrid/HybridSmoother.h index 5c2e4f546..fc40e0297 100644 --- a/gtsam/hybrid/HybridSmoother.h +++ b/gtsam/hybrid/HybridSmoother.h @@ -69,7 +69,7 @@ class GTSAM_EXPORT HybridSmoother { const HybridBayesNet& hybridBayesNet, const Ordering& ordering) const; /// Get the Gaussian Mixture from the Bayes Net posterior at `index`. - GaussianMixture::shared_ptr gaussianMixture(size_t index) const; + HybridGaussianConditional::shared_ptr gaussianMixture(size_t index) const; /// Return the Bayes Net posterior. const HybridBayesNet& hybridBayesNet() const; diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index e5be00c6f..834a067c3 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -65,7 +65,7 @@ virtual class HybridConditional { double logProbability(const gtsam::HybridValues& values) const; double evaluate(const gtsam::HybridValues& values) const; double operator()(const gtsam::HybridValues& values) const; - gtsam::GaussianMixture* asMixture() const; + gtsam::HybridGaussianConditional* asMixture() const; gtsam::GaussianConditional* asGaussian() const; gtsam::DiscreteConditional* asDiscrete() const; gtsam::Factor* inner(); @@ -84,9 +84,9 @@ class HybridGaussianFactor : gtsam::HybridFactor { gtsam::DefaultKeyFormatter) const; }; -#include -class GaussianMixture : gtsam::HybridFactor { - GaussianMixture(const gtsam::KeyVector& continuousFrontals, +#include +class HybridGaussianConditional : gtsam::HybridFactor { + HybridGaussianConditional(const gtsam::KeyVector& continuousFrontals, const gtsam::KeyVector& continuousParents, const gtsam::DiscreteKeys& discreteParents, const std::vector& @@ -97,7 +97,7 @@ class GaussianMixture : gtsam::HybridFactor { double logProbability(const gtsam::HybridValues& values) const; double evaluate(const gtsam::HybridValues& values) const; - void print(string s = "GaussianMixture\n", + void print(string s = "HybridGaussianConditional\n", const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const; }; 
@@ -131,7 +131,7 @@ class HybridBayesTree { #include class HybridBayesNet { HybridBayesNet(); - void push_back(const gtsam::GaussianMixture* s); + void push_back(const gtsam::HybridGaussianConditional* s); void push_back(const gtsam::GaussianConditional* s); void push_back(const gtsam::DiscreteConditional* s); diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index 26b83db29..00af28308 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -43,7 +43,7 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1, // Create Gaussian mixture z_i = x0 + noise for each measurement. for (size_t i = 0; i < num_measurements; i++) { const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode; - bayesNet.emplace_shared( + bayesNet.emplace_shared( KeyVector{Z(i)}, KeyVector{X(0)}, DiscreteKeys{mode_i}, std::vector{GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 0.5), diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index a379dd036..116e1119d 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -11,7 +11,7 @@ /** * @file testGaussianMixture.cpp - * @brief Unit tests for GaussianMixture class + * @brief Unit tests for HybridGaussianConditional class * @author Varun Agrawal * @author Fan Jiang * @author Frank Dellaert @@ -19,7 +19,7 @@ */ #include -#include +#include #include #include #include @@ -46,19 +46,19 @@ static const HybridValues hv1{vv, assignment1}; /* ************************************************************************* */ namespace equal_constants { -// Create a simple GaussianMixture +// Create a simple HybridGaussianConditional const double commonSigma = 2.0; const std::vector conditionals{ GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), commonSigma), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), 
Vector1(0.0), commonSigma)}; -const GaussianMixture mixture({Z(0)}, {X(0)}, {mode}, conditionals); +const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals); } // namespace equal_constants /* ************************************************************************* */ /// Check that invariants hold -TEST(GaussianMixture, Invariants) { +TEST(HybridGaussianConditional, Invariants) { using namespace equal_constants; // Check that the mixture normalization constant is the max of all constants @@ -67,13 +67,13 @@ TEST(GaussianMixture, Invariants) { EXPECT_DOUBLES_EQUAL(K, conditionals[0]->logNormalizationConstant(), 1e-8); EXPECT_DOUBLES_EQUAL(K, conditionals[1]->logNormalizationConstant(), 1e-8); - EXPECT(GaussianMixture::CheckInvariants(mixture, hv0)); - EXPECT(GaussianMixture::CheckInvariants(mixture, hv1)); + EXPECT(HybridGaussianConditional::CheckInvariants(mixture, hv0)); + EXPECT(HybridGaussianConditional::CheckInvariants(mixture, hv1)); } /* ************************************************************************* */ /// Check LogProbability. -TEST(GaussianMixture, LogProbability) { +TEST(HybridGaussianConditional, LogProbability) { using namespace equal_constants; auto actual = mixture.logProbability(vv); @@ -95,7 +95,7 @@ TEST(GaussianMixture, LogProbability) { /* ************************************************************************* */ /// Check error. -TEST(GaussianMixture, Error) { +TEST(HybridGaussianConditional, Error) { using namespace equal_constants; auto actual = mixture.errorTree(vv); @@ -118,7 +118,7 @@ TEST(GaussianMixture, Error) { /* ************************************************************************* */ /// Check that the likelihood is proportional to the conditional density given /// the measurements. 
-TEST(GaussianMixture, Likelihood) { +TEST(HybridGaussianConditional, Likelihood) { using namespace equal_constants; // Compute likelihood @@ -147,19 +147,19 @@ TEST(GaussianMixture, Likelihood) { /* ************************************************************************* */ namespace mode_dependent_constants { -// Create a GaussianMixture with mode-dependent noise models. +// Create a HybridGaussianConditional with mode-dependent noise models. // 0 is low-noise, 1 is high-noise. const std::vector conditionals{ GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 0.5), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 3.0)}; -const GaussianMixture mixture({Z(0)}, {X(0)}, {mode}, conditionals); +const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals); } // namespace mode_dependent_constants /* ************************************************************************* */ // Create a test for continuousParents. -TEST(GaussianMixture, ContinuousParents) { +TEST(HybridGaussianConditional, ContinuousParents) { using namespace mode_dependent_constants; const KeyVector continuousParentKeys = mixture.continuousParents(); // Check that the continuous parent keys are correct: @@ -170,7 +170,7 @@ TEST(GaussianMixture, ContinuousParents) { /* ************************************************************************* */ /// Check that the likelihood is proportional to the conditional density given /// the measurements. 
-TEST(GaussianMixture, Likelihood2) { +TEST(HybridGaussianConditional, Likelihood2) { using namespace mode_dependent_constants; // Compute likelihood diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index cd20e7215..b90950605 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -20,7 +20,7 @@ #include #include -#include +#include #include #include #include @@ -144,7 +144,7 @@ Hybrid [x1 x2; 1]{ } /* ************************************************************************* */ -TEST(HybridGaussianFactor, GaussianMixture) { +TEST(HybridGaussianFactor, HybridGaussianConditional) { KeyVector keys; keys.push_back(X(0)); keys.push_back(X(1)); @@ -154,8 +154,8 @@ TEST(HybridGaussianFactor, GaussianMixture) { dKeys.emplace_back(M(1), 2); auto gaussians = std::make_shared(); - GaussianMixture::Conditionals conditionals(gaussians); - GaussianMixture gm({}, keys, dKeys, conditionals); + HybridGaussianConditional::Conditionals conditionals(gaussians); + HybridGaussianConditional gm({}, keys, dKeys, conditionals); EXPECT_LONGS_EQUAL(2, gm.discreteKeys().size()); } @@ -229,7 +229,7 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, c1 = make_shared(z, Vector1(mu1), I_1x1, model1); HybridBayesNet hbn; - hbn.emplace_shared(KeyVector{z}, KeyVector{}, + hbn.emplace_shared(KeyVector{z}, KeyVector{}, DiscreteKeys{m}, std::vector{c0, c1}); auto mixing = make_shared(m, "0.5/0.5"); @@ -413,7 +413,7 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, c1 = make_shared(x1, Vector1(mu1), I_1x1, x0, -I_1x1, model1); - auto motion = std::make_shared( + auto motion = std::make_shared( KeyVector{x1}, KeyVector{x0}, DiscreteKeys{m1}, std::vector{c0, c1}); hbn.push_back(motion); diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 9b7ba9744..b35468302 100644 --- 
a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -107,7 +107,7 @@ TEST(HybridBayesNet, evaluateHybrid) { // Create hybrid Bayes net. HybridBayesNet bayesNet; bayesNet.push_back(continuousConditional); - bayesNet.emplace_shared( + bayesNet.emplace_shared( KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia}, std::vector{conditional0, conditional1}); bayesNet.emplace_shared(Asia, "99/1"); @@ -168,7 +168,7 @@ TEST(HybridBayesNet, Error) { conditional1 = std::make_shared( X(1), Vector1::Constant(2), I_1x1, model1); - auto gm = std::make_shared( + auto gm = std::make_shared( KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia}, std::vector{conditional0, conditional1}); // Create hybrid Bayes net. diff --git a/gtsam/hybrid/tests/testHybridConditional.cpp b/gtsam/hybrid/tests/testHybridConditional.cpp index 406306df7..78dbd314c 100644 --- a/gtsam/hybrid/tests/testHybridConditional.cpp +++ b/gtsam/hybrid/tests/testHybridConditional.cpp @@ -43,9 +43,9 @@ TEST(HybridConditional, Invariants) { auto hc0 = bn.at(0); CHECK(hc0->isHybrid()); - // Check invariants as a GaussianMixture. + // Check invariants as a HybridGaussianConditional. const auto mixture = hc0->asMixture(); - EXPECT(GaussianMixture::CheckInvariants(*mixture, values)); + EXPECT(HybridGaussianConditional::CheckInvariants(*mixture, values)); // Check invariants as a HybridConditional. 
EXPECT(HybridConditional::CheckInvariants(*hc0, values)); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 295633916..299562e32 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -616,7 +616,7 @@ TEST(HybridEstimation, ModeSelection) { GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1)); bn.push_back( GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1)); - bn.emplace_shared( + bn.emplace_shared( KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode}, std::vector{GaussianConditional::sharedMeanAndStddev( Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_loose), @@ -647,7 +647,7 @@ TEST(HybridEstimation, ModeSelection2) { GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1)); bn.push_back( GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1)); - bn.emplace_shared( + bn.emplace_shared( KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode}, std::vector{GaussianConditional::sharedMeanAndStddev( Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_loose), diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 96d0c478e..3386daac8 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -21,7 +21,7 @@ #include #include #include -#include +#include #include #include #include @@ -71,8 +71,8 @@ TEST(HybridGaussianFactorGraph, Creation) { // Define a gaussian mixture conditional P(x0|x1, c0) and add it to the factor // graph - GaussianMixture gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}), - GaussianMixture::Conditionals( + HybridGaussianConditional gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}), + HybridGaussianConditional::Conditionals( M(0), std::make_shared( X(0), Z_3x1, I_3x3, X(1), I_3x3), @@ -681,7 +681,7 @@ 
TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) { x0, -I_1x1, model0), c1 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, x0, -I_1x1, model1); - hbn.emplace_shared(KeyVector{f01}, KeyVector{x0, x1}, + hbn.emplace_shared(KeyVector{f01}, KeyVector{x0, x1}, DiscreteKeys{m1}, std::vector{c0, c1}); // Discrete uniform prior. @@ -805,7 +805,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { X(0), Vector1(14.1421), I_1x1 * 2.82843), conditional1 = std::make_shared( X(0), Vector1(10.1379), I_1x1 * 2.02759); - expectedBayesNet.emplace_shared( + expectedBayesNet.emplace_shared( KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, std::vector{conditional0, conditional1}); @@ -830,7 +830,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { HybridBayesNet bn; // Create Gaussian mixture z_0 = x0 + noise for each measurement. - auto gm = std::make_shared( + auto gm = std::make_shared( KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode}, std::vector{ GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3), @@ -862,7 +862,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { X(0), Vector1(10.1379), I_1x1 * 2.02759), conditional1 = std::make_shared( X(0), Vector1(14.1421), I_1x1 * 2.82843); - expectedBayesNet.emplace_shared( + expectedBayesNet.emplace_shared( KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, std::vector{conditional0, conditional1}); @@ -899,7 +899,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) { X(0), Vector1(17.3205), I_1x1 * 3.4641), conditional1 = std::make_shared( X(0), Vector1(10.274), I_1x1 * 2.0548); - expectedBayesNet.emplace_shared( + expectedBayesNet.emplace_shared( KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, std::vector{conditional0, conditional1}); @@ -946,7 +946,7 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { for (size_t t : {0, 1, 2}) { // Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t): const auto noise_mode_t = DiscreteKey{N(t), 2}; - bn.emplace_shared( + 
bn.emplace_shared( KeyVector{Z(t)}, KeyVector{X(t)}, DiscreteKeys{noise_mode_t}, std::vector{GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1, 0.5), @@ -961,7 +961,7 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { for (size_t t : {2, 1}) { // Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1): const auto motion_model_t = DiscreteKey{M(t), 2}; - auto gm = std::make_shared( + auto gm = std::make_shared( KeyVector{X(t)}, KeyVector{X(t - 1)}, DiscreteKeys{motion_model_t}, std::vector{GaussianConditional::sharedMeanAndStddev( X(t), I_1x1, X(t - 1), Z_1x1, 0.2), diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index d2b0fded3..8b15b50d2 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -134,22 +134,22 @@ TEST(HybridGaussianElimination, IncrementalInference) { // The densities on X(0) should be the same auto x0_conditional = - dynamic_pointer_cast(isam[X(0)]->conditional()->inner()); - auto expected_x0_conditional = dynamic_pointer_cast( + dynamic_pointer_cast(isam[X(0)]->conditional()->inner()); + auto expected_x0_conditional = dynamic_pointer_cast( (*expectedHybridBayesTree)[X(0)]->conditional()->inner()); EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional)); // The densities on X(1) should be the same auto x1_conditional = - dynamic_pointer_cast(isam[X(1)]->conditional()->inner()); - auto expected_x1_conditional = dynamic_pointer_cast( + dynamic_pointer_cast(isam[X(1)]->conditional()->inner()); + auto expected_x1_conditional = dynamic_pointer_cast( (*expectedHybridBayesTree)[X(1)]->conditional()->inner()); EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional)); // The densities on X(2) should be the same auto x2_conditional = - dynamic_pointer_cast(isam[X(2)]->conditional()->inner()); - auto expected_x2_conditional = dynamic_pointer_cast( + 
dynamic_pointer_cast(isam[X(2)]->conditional()->inner()); + auto expected_x2_conditional = dynamic_pointer_cast( (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional)); @@ -279,9 +279,9 @@ TEST(HybridGaussianElimination, Approx_inference) { // Check that the hybrid nodes of the bayes net match those of the pre-pruning // bayes net, at the same positions. - auto &unprunedLastDensity = *dynamic_pointer_cast( + auto &unprunedLastDensity = *dynamic_pointer_cast( unprunedHybridBayesTree->clique(X(3))->conditional()->inner()); - auto &lastDensity = *dynamic_pointer_cast( + auto &lastDensity = *dynamic_pointer_cast( incrementalHybrid[X(3)]->conditional()->inner()); std::vector> assignments = diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 9e2816865..3f9a18653 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -350,7 +350,7 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) { EliminateHybrid(factors, ordering); auto gaussianConditionalMixture = - dynamic_pointer_cast(hybridConditionalMixture->inner()); + dynamic_pointer_cast(hybridConditionalMixture->inner()); CHECK(gaussianConditionalMixture); // Frontals = [x0, x1] diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index e2ed4cbad..06a4249d3 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -151,23 +151,23 @@ TEST(HybridNonlinearISAM, IncrementalInference) { .BaseEliminateable::eliminatePartialMultifrontal(ordering); // The densities on X(1) should be the same - auto x0_conditional = dynamic_pointer_cast( + auto x0_conditional = dynamic_pointer_cast( bayesTree[X(0)]->conditional()->inner()); - auto expected_x0_conditional = dynamic_pointer_cast( + auto 
expected_x0_conditional = dynamic_pointer_cast( (*expectedHybridBayesTree)[X(0)]->conditional()->inner()); EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional)); // The densities on X(1) should be the same - auto x1_conditional = dynamic_pointer_cast( + auto x1_conditional = dynamic_pointer_cast( bayesTree[X(1)]->conditional()->inner()); - auto expected_x1_conditional = dynamic_pointer_cast( + auto expected_x1_conditional = dynamic_pointer_cast( (*expectedHybridBayesTree)[X(1)]->conditional()->inner()); EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional)); // The densities on X(2) should be the same - auto x2_conditional = dynamic_pointer_cast( + auto x2_conditional = dynamic_pointer_cast( bayesTree[X(2)]->conditional()->inner()); - auto expected_x2_conditional = dynamic_pointer_cast( + auto expected_x2_conditional = dynamic_pointer_cast( (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional)); @@ -300,9 +300,9 @@ TEST(HybridNonlinearISAM, Approx_inference) { // Check that the hybrid nodes of the bayes net match those of the pre-pruning // bayes net, at the same positions. 
- auto &unprunedLastDensity = *dynamic_pointer_cast( + auto &unprunedLastDensity = *dynamic_pointer_cast( unprunedHybridBayesTree->clique(X(3))->conditional()->inner()); - auto &lastDensity = *dynamic_pointer_cast( + auto &lastDensity = *dynamic_pointer_cast( bayesTree[X(3)]->conditional()->inner()); std::vector> assignments = diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 31fe61d9b..17196d2ae 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -18,7 +18,7 @@ #include #include -#include +#include #include #include #include @@ -59,12 +59,12 @@ BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Leaf, BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Choice, "gtsam_GaussianMixtureFactor_Factors_Choice"); -BOOST_CLASS_EXPORT_GUID(GaussianMixture, "gtsam_GaussianMixture"); -BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals, +BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional, "gtsam_GaussianMixture"); +BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals, "gtsam_GaussianMixture_Conditionals"); -BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals::Leaf, +BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals::Leaf, "gtsam_GaussianMixture_Conditionals_Leaf"); -BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals::Choice, +BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals::Choice, "gtsam_GaussianMixture_Conditionals_Choice"); // Needed since GaussianConditional::FromMeanAndStddev uses it BOOST_CLASS_EXPORT_GUID(noiseModel::Isotropic, "gtsam_noiseModel_Isotropic"); @@ -106,20 +106,20 @@ TEST(HybridSerialization, HybridConditional) { } /* ****************************************************************************/ -// Test GaussianMixture serialization. -TEST(HybridSerialization, GaussianMixture) { +// Test HybridGaussianConditional serialization. 
+TEST(HybridSerialization, HybridGaussianConditional) { const DiscreteKey mode(M(0), 2); Matrix1 I = Matrix1::Identity(); const auto conditional0 = std::make_shared( GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 0.5)); const auto conditional1 = std::make_shared( GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3)); - const GaussianMixture gm({Z(0)}, {X(0)}, {mode}, + const HybridGaussianConditional gm({Z(0)}, {X(0)}, {mode}, {conditional0, conditional1}); - EXPECT(equalsObj(gm)); - EXPECT(equalsXML(gm)); - EXPECT(equalsBinary(gm)); + EXPECT(equalsObj(gm)); + EXPECT(equalsXML(gm)); + EXPECT(equalsBinary(gm)); } /* ****************************************************************************/ diff --git a/gtsam/inference/Conditional.h b/gtsam/inference/Conditional.h index 0ab8b49a4..c76c05ab1 100644 --- a/gtsam/inference/Conditional.h +++ b/gtsam/inference/Conditional.h @@ -46,7 +46,7 @@ namespace gtsam { * Gaussian density over a set of continuous variables. * - \b Discrete conditionals, implemented in \class DiscreteConditional, which * represent a discrete conditional distribution over discrete variables. - * - \b Hybrid conditional densities, such as \class GaussianMixture, which is + * - \b Hybrid conditional densities, such as \class HybridGaussianConditional, which is * a density over continuous variables given discrete/continuous parents. * - \b Symbolic factors, used to represent a graph structure, implemented in * \class SymbolicConditional. Only used for symbolic elimination etc. 
diff --git a/python/gtsam/tests/test_HybridBayesNet.py b/python/gtsam/tests/test_HybridBayesNet.py index bc685dec6..0fd569908 100644 --- a/python/gtsam/tests/test_HybridBayesNet.py +++ b/python/gtsam/tests/test_HybridBayesNet.py @@ -18,8 +18,9 @@ from gtsam.symbol_shorthand import A, X from gtsam.utils.test_case import GtsamTestCase from gtsam import (DiscreteConditional, DiscreteKeys, DiscreteValues, - GaussianConditional, GaussianMixture, HybridBayesNet, - HybridValues, VectorValues, noiseModel) + GaussianConditional, HybridBayesNet, + HybridGaussianConditional, HybridValues, VectorValues, + noiseModel) class TestHybridBayesNet(GtsamTestCase): @@ -49,7 +50,7 @@ class TestHybridBayesNet(GtsamTestCase): bayesNet = HybridBayesNet() bayesNet.push_back(conditional) bayesNet.push_back( - GaussianMixture([X(1)], [], discrete_keys, + HybridGaussianConditional([X(1)], [], discrete_keys, [conditional0, conditional1])) bayesNet.push_back(DiscreteConditional(Asia, "99/1")) diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index 02a9b7b7d..c7c4c6b51 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -18,9 +18,9 @@ from gtsam.utils.test_case import GtsamTestCase import gtsam from gtsam import (DiscreteConditional, DiscreteKeys, GaussianConditional, - GaussianMixture, HybridBayesNet, HybridGaussianFactor, - HybridGaussianFactorGraph, HybridValues, JacobianFactor, - Ordering, noiseModel) + HybridBayesNet, HybridGaussianConditional, + HybridGaussianFactor, HybridGaussianFactorGraph, + HybridValues, JacobianFactor, Ordering, noiseModel) DEBUG_MARGINALS = False @@ -48,7 +48,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): self.assertEqual(hbn.size(), 2) mixture = hbn.at(0).inner() - self.assertIsInstance(mixture, GaussianMixture) + self.assertIsInstance(mixture, HybridGaussianConditional) self.assertEqual(len(mixture.keys()), 2) discrete_conditional = 
hbn.at(hbn.size() - 1).inner() @@ -106,7 +106,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): I_1x1, X(0), [0], sigma=3) - bayesNet.push_back(GaussianMixture([Z(i)], [X(0)], keys, + bayesNet.push_back(HybridGaussianConditional([Z(i)], [X(0)], keys, [conditional0, conditional1])) # Create prior on X(0). From 94805798e90d63a4a747d20a42c3afd199c3c431 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 13 Sep 2024 05:59:09 -0400 Subject: [PATCH 211/398] rename files --- ...{GaussianMixture.cpp => HybridGaussianConditional.cpp} | 8 ++++---- .../{GaussianMixture.h => HybridGaussianConditional.h} | 0 ...GaussianMixtureFactor.cpp => HybridGaussianFactor.cpp} | 0 .../{GaussianMixtureFactor.h => HybridGaussianFactor.h} | 0 gtsam/hybrid/{MixtureFactor.h => HybridNonlinearFactor.h} | 0 ...ssianMixture.cpp => testHybridGaussianConditional.cpp} | 0 ...sianMixtureFactor.cpp => testHybridGaussianFactor.cpp} | 2 +- ...estMixtureFactor.cpp => testHybridNonlinearFactor.cpp} | 2 +- 8 files changed, 6 insertions(+), 6 deletions(-) rename gtsam/hybrid/{GaussianMixture.cpp => HybridGaussianConditional.cpp} (98%) rename gtsam/hybrid/{GaussianMixture.h => HybridGaussianConditional.h} (100%) rename gtsam/hybrid/{GaussianMixtureFactor.cpp => HybridGaussianFactor.cpp} (100%) rename gtsam/hybrid/{GaussianMixtureFactor.h => HybridGaussianFactor.h} (100%) rename gtsam/hybrid/{MixtureFactor.h => HybridNonlinearFactor.h} (100%) rename gtsam/hybrid/tests/{testGaussianMixture.cpp => testHybridGaussianConditional.cpp} (100%) rename gtsam/hybrid/tests/{testGaussianMixtureFactor.cpp => testHybridGaussianFactor.cpp} (99%) rename gtsam/hybrid/tests/{testMixtureFactor.cpp => testHybridNonlinearFactor.cpp} (98%) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp similarity index 98% rename from gtsam/hybrid/GaussianMixture.cpp rename to gtsam/hybrid/HybridGaussianConditional.cpp index 451493ae0..4ad85b827 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ 
b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -72,7 +72,7 @@ HybridGaussianConditional::HybridGaussianConditional( /* *******************************************************************************/ // TODO(dellaert): This is copy/paste: HybridGaussianConditional should be derived from -// GaussianMixtureFactor, no? +// HybridGaussianFactor, no? GaussianFactorGraphTree HybridGaussianConditional::add( const GaussianFactorGraphTree &sum) const { using Y = GaussianFactorGraph; @@ -203,7 +203,7 @@ bool HybridGaussianConditional::allFrontalsGiven(const VectorValues &given) cons } /* ************************************************************************* */ -std::shared_ptr HybridGaussianConditional::likelihood( +std::shared_ptr HybridGaussianConditional::likelihood( const VectorValues &given) const { if (!allFrontalsGiven(given)) { throw std::runtime_error( @@ -212,7 +212,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( const DiscreteKeys discreteParentKeys = discreteKeys(); const KeyVector continuousParentKeys = continuousParents(); - const GaussianMixtureFactor::Factors likelihoods( + const HybridGaussianFactor::Factors likelihoods( conditionals_, [&](const GaussianConditional::shared_ptr &conditional) { const auto likelihood_m = conditional->likelihood(given); const double Cgm_Kgcm = @@ -231,7 +231,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( return std::make_shared(gfg); } }); - return std::make_shared( + return std::make_shared( continuousParentKeys, discreteParentKeys, likelihoods); } diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/HybridGaussianConditional.h similarity index 100% rename from gtsam/hybrid/GaussianMixture.h rename to gtsam/hybrid/HybridGaussianConditional.h diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp similarity index 100% rename from gtsam/hybrid/GaussianMixtureFactor.cpp rename to gtsam/hybrid/HybridGaussianFactor.cpp diff --git 
a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/HybridGaussianFactor.h similarity index 100% rename from gtsam/hybrid/GaussianMixtureFactor.h rename to gtsam/hybrid/HybridGaussianFactor.h diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h similarity index 100% rename from gtsam/hybrid/MixtureFactor.h rename to gtsam/hybrid/HybridNonlinearFactor.h diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp similarity index 100% rename from gtsam/hybrid/tests/testGaussianMixture.cpp rename to gtsam/hybrid/tests/testHybridGaussianConditional.cpp diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp similarity index 99% rename from gtsam/hybrid/tests/testGaussianMixtureFactor.cpp rename to gtsam/hybrid/tests/testHybridGaussianFactor.cpp index b90950605..f9d276760 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file testGaussianMixtureFactor.cpp + * @file testHybridGaussianFactor.cpp * @brief Unit tests for HybridGaussianFactor * @author Varun Agrawal * @author Fan Jiang diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp similarity index 98% rename from gtsam/hybrid/tests/testMixtureFactor.cpp rename to gtsam/hybrid/tests/testHybridNonlinearFactor.cpp index dead8cd34..325478e94 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file testMixtureFactor.cpp + * @file testHybridNonlinearFactor.cpp * @brief Unit tests for HybridNonlinearFactor * @author Varun Agrawal * @date October 2022 From 035c92a38fc2d1113aa600106043663288b423a4 Mon Sep 17 
00:00:00 2001 From: Varun Agrawal Date: Fri, 13 Sep 2024 05:59:56 -0400 Subject: [PATCH 212/398] finalize renaming --- doc/Hybrid.lyx | 2 +- gtsam/hybrid/HybridBayesNet.cpp | 6 +++--- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 4 ++-- gtsam/hybrid/HybridNonlinearFactor.h | 2 +- .../tests/testHybridGaussianConditional.cpp | 2 +- gtsam/hybrid/tests/testHybridNonlinearFactor.cpp | 6 +++--- gtsam/hybrid/tests/testSerializationHybrid.cpp | 16 ++++++++-------- 7 files changed, 19 insertions(+), 19 deletions(-) diff --git a/doc/Hybrid.lyx b/doc/Hybrid.lyx index 17d7094e1..a5fdd5d0a 100644 --- a/doc/Hybrid.lyx +++ b/doc/Hybrid.lyx @@ -201,7 +201,7 @@ HybridGaussianConditional \emph default (maybe to be renamed to \emph on -GaussianMixtureComponent +HybridGaussianConditionalComponent \emph default ) just indexes into a number of \emph on diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 90aa6cde2..4f8d2350c 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -182,11 +182,11 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) { for (auto &&conditional : *this) { if (auto gm = conditional->asMixture()) { // Make a copy of the Gaussian mixture and prune it! - auto prunedGaussianMixture = std::make_shared(*gm); - prunedGaussianMixture->prune(prunedDiscreteProbs); // imperative :-( + auto prunedHybridGaussianConditional = std::make_shared(*gm); + prunedHybridGaussianConditional->prune(prunedDiscreteProbs); // imperative :-( // Type-erase and add to the pruned Bayes Net fragment. 
- prunedBayesNetFragment.push_back(prunedGaussianMixture); + prunedBayesNetFragment.push_back(prunedHybridGaussianConditional); } else { // Add the non-HybridGaussianConditional conditional diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 656060ae6..4c6546708 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -343,7 +343,7 @@ static std::shared_ptr createDiscreteFactor( // Create HybridGaussianFactor on the separator, taking care to correct // for conditional constants. -static std::shared_ptr createGaussianMixtureFactor( +static std::shared_ptr createHybridGaussianFactor( const DecisionTree &eliminationResults, const KeyVector &continuousSeparator, const DiscreteKeys &discreteSeparator) { @@ -405,7 +405,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, auto newFactor = continuousSeparator.empty() ? createDiscreteFactor(eliminationResults, discreteSeparator) - : createGaussianMixtureFactor(eliminationResults, continuousSeparator, + : createHybridGaussianFactor(eliminationResults, continuousSeparator, discreteSeparator); // Create the HybridGaussianConditional from the conditionals diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index d3d673dda..7b2990cb0 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -188,7 +188,7 @@ class HybridNonlinearFactor : public HybridFactor { const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override { std::cout << (s.empty() ? 
"" : s + " "); Base::print("", keyFormatter); - std::cout << "\nMixtureFactor\n"; + std::cout << "\nHybridNonlinearFactor\n"; auto valueFormatter = [](const sharedFactor& v) { if (v) { return "Nonlinear factor on " + std::to_string(v->size()) + " keys"; diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index 116e1119d..3f18259de 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file testGaussianMixture.cpp + * @file testHybridGaussianConditional.cpp * @brief Unit tests for HybridGaussianConditional class * @author Varun Agrawal * @author Fan Jiang diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp index 325478e94..9e2794b19 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp @@ -73,7 +73,7 @@ HybridNonlinearFactor } /* ************************************************************************* */ -static HybridNonlinearFactor getMixtureFactor() { +static HybridNonlinearFactor getHybridNonlinearFactor() { DiscreteKey m1(1, 2); double between0 = 0.0; @@ -94,7 +94,7 @@ static HybridNonlinearFactor getMixtureFactor() { /* ************************************************************************* */ // Test the error of the HybridNonlinearFactor TEST(HybridNonlinearFactor, Error) { - auto mixtureFactor = getMixtureFactor(); + auto mixtureFactor = getHybridNonlinearFactor(); Values continuousValues; continuousValues.insert(X(1), 0); @@ -114,7 +114,7 @@ TEST(HybridNonlinearFactor, Error) { /* ************************************************************************* */ // Test dim of the HybridNonlinearFactor TEST(HybridNonlinearFactor, Dim) { - auto mixtureFactor = getMixtureFactor(); + auto 
mixtureFactor = getHybridNonlinearFactor(); EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); } diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 17196d2ae..76d919112 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -51,21 +51,21 @@ BOOST_CLASS_EXPORT_GUID(ADT, "gtsam_AlgebraicDecisionTree"); BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_AlgebraicDecisionTree_Leaf"); BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_AlgebraicDecisionTree_Choice") -BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor, "gtsam_GaussianMixtureFactor"); +BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor, "gtsam_HybridGaussianFactor"); BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors, - "gtsam_GaussianMixtureFactor_Factors"); + "gtsam_HybridGaussianFactor_Factors"); BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Leaf, - "gtsam_GaussianMixtureFactor_Factors_Leaf"); + "gtsam_HybridGaussianFactor_Factors_Leaf"); BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Choice, - "gtsam_GaussianMixtureFactor_Factors_Choice"); + "gtsam_HybridGaussianFactor_Factors_Choice"); -BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional, "gtsam_GaussianMixture"); +BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional, "gtsam_HybridGaussianConditional"); BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals, - "gtsam_GaussianMixture_Conditionals"); + "gtsam_HybridGaussianConditional_Conditionals"); BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals::Leaf, - "gtsam_GaussianMixture_Conditionals_Leaf"); + "gtsam_HybridGaussianConditional_Conditionals_Leaf"); BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals::Choice, - "gtsam_GaussianMixture_Conditionals_Choice"); + "gtsam_HybridGaussianConditional_Conditionals_Choice"); // Needed since GaussianConditional::FromMeanAndStddev uses it BOOST_CLASS_EXPORT_GUID(noiseModel::Isotropic, "gtsam_noiseModel_Isotropic"); From 
629989f9eeabb90425f4f95eac187e98a5affdfb Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 13 Sep 2024 06:20:46 -0400 Subject: [PATCH 213/398] formatting --- gtsam/hybrid/HybridBayesNet.cpp | 6 +- gtsam/hybrid/HybridConditional.cpp | 4 +- gtsam/hybrid/HybridConditional.h | 11 +-- gtsam/hybrid/HybridEliminationTree.h | 4 +- gtsam/hybrid/HybridGaussianConditional.cpp | 34 ++++++---- gtsam/hybrid/HybridGaussianConditional.h | 18 ++--- gtsam/hybrid/HybridGaussianFactor.cpp | 6 +- gtsam/hybrid/HybridGaussianFactor.h | 13 ++-- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 8 +-- gtsam/hybrid/HybridGaussianFactorGraph.h | 3 +- gtsam/hybrid/HybridJunctionTree.h | 5 +- gtsam/hybrid/HybridNonlinearFactor.h | 17 ++--- gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 5 +- gtsam/hybrid/HybridNonlinearISAM.h | 1 + gtsam/hybrid/hybrid.i | 51 +++++++------- gtsam/hybrid/tests/Switching.h | 6 +- gtsam/hybrid/tests/testHybridEstimation.cpp | 2 +- .../hybrid/tests/testHybridGaussianFactor.cpp | 6 +- .../tests/testHybridGaussianFactorGraph.cpp | 23 ++++--- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 67 ++++++++++--------- .../tests/testHybridNonlinearFactor.cpp | 2 +- .../tests/testHybridNonlinearFactorGraph.cpp | 12 ++-- .../hybrid/tests/testHybridNonlinearISAM.cpp | 55 ++++++++------- .../hybrid/tests/testSerializationHybrid.cpp | 9 +-- python/gtsam/tests/test_HybridBayesNet.py | 2 +- python/gtsam/tests/test_HybridFactorGraph.py | 12 ++-- .../tests/test_HybridNonlinearFactorGraph.py | 6 +- python/gtsam/tests/test_HybridValues.py | 3 +- 28 files changed, 212 insertions(+), 179 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 4f8d2350c..14a54c0b4 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -182,8 +182,10 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) { for (auto &&conditional : *this) { if (auto gm = conditional->asMixture()) { // Make a copy of the Gaussian mixture and prune 
it! - auto prunedHybridGaussianConditional = std::make_shared(*gm); - prunedHybridGaussianConditional->prune(prunedDiscreteProbs); // imperative :-( + auto prunedHybridGaussianConditional = + std::make_shared(*gm); + prunedHybridGaussianConditional->prune( + prunedDiscreteProbs); // imperative :-( // Type-erase and add to the pruned Bayes Net fragment. prunedBayesNetFragment.push_back(prunedHybridGaussianConditional); diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index 5ef74ea7d..8a8511aef 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -157,10 +157,10 @@ double HybridConditional::logNormalizationConstant() const { return gc->logNormalizationConstant(); } if (auto gm = asMixture()) { - return gm->logNormalizationConstant(); // 0.0! + return gm->logNormalizationConstant(); // 0.0! } if (auto dc = asDiscrete()) { - return dc->logNormalizationConstant(); // 0.0! + return dc->logNormalizationConstant(); // 0.0! } throw std::runtime_error( "HybridConditional::logProbability: conditional type not handled"); diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index d02b69127..fb51e95f6 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -18,8 +18,8 @@ #pragma once #include -#include #include +#include #include #include #include @@ -127,7 +127,8 @@ class GTSAM_EXPORT HybridConditional * @param gaussianMixture Gaussian Mixture Conditional used to create the * HybridConditional. 
*/ - HybridConditional(const std::shared_ptr& gaussianMixture); + HybridConditional( + const std::shared_ptr& gaussianMixture); /// @} /// @name Testable @@ -222,8 +223,10 @@ class GTSAM_EXPORT HybridConditional boost::serialization::void_cast_register( static_cast(NULL), static_cast(NULL)); } else { - boost::serialization::void_cast_register( - static_cast(NULL), static_cast(NULL)); + boost::serialization::void_cast_register( + static_cast(NULL), + static_cast(NULL)); } } #endif diff --git a/gtsam/hybrid/HybridEliminationTree.h b/gtsam/hybrid/HybridEliminationTree.h index e1eb1764a..543e09c6f 100644 --- a/gtsam/hybrid/HybridEliminationTree.h +++ b/gtsam/hybrid/HybridEliminationTree.h @@ -35,8 +35,8 @@ class GTSAM_EXPORT HybridEliminationTree public: typedef EliminationTree - Base; ///< Base class - typedef HybridEliminationTree This; ///< This class + Base; ///< Base class + typedef HybridEliminationTree This; ///< This class typedef std::shared_ptr shared_ptr; ///< Shared pointer to this class /// @name Constructors diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 4ad85b827..42e702155 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -50,7 +50,8 @@ HybridGaussianConditional::HybridGaussianConditional( } /* *******************************************************************************/ -const HybridGaussianConditional::Conditionals &HybridGaussianConditional::conditionals() const { +const HybridGaussianConditional::Conditionals & +HybridGaussianConditional::conditionals() const { return conditionals_; } @@ -59,20 +60,22 @@ HybridGaussianConditional::HybridGaussianConditional( KeyVector &&continuousFrontals, KeyVector &&continuousParents, DiscreteKeys &&discreteParents, std::vector &&conditionals) - : HybridGaussianConditional(continuousFrontals, continuousParents, discreteParents, - Conditionals(discreteParents, conditionals)) {} + : 
HybridGaussianConditional(continuousFrontals, continuousParents, + discreteParents, + Conditionals(discreteParents, conditionals)) {} /* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( const KeyVector &continuousFrontals, const KeyVector &continuousParents, const DiscreteKeys &discreteParents, const std::vector &conditionals) - : HybridGaussianConditional(continuousFrontals, continuousParents, discreteParents, - Conditionals(discreteParents, conditionals)) {} + : HybridGaussianConditional(continuousFrontals, continuousParents, + discreteParents, + Conditionals(discreteParents, conditionals)) {} /* *******************************************************************************/ -// TODO(dellaert): This is copy/paste: HybridGaussianConditional should be derived from -// HybridGaussianFactor, no? +// TODO(dellaert): This is copy/paste: HybridGaussianConditional should be +// derived from HybridGaussianFactor, no? 
GaussianFactorGraphTree HybridGaussianConditional::add( const GaussianFactorGraphTree &sum) const { using Y = GaussianFactorGraph; @@ -86,7 +89,8 @@ GaussianFactorGraphTree HybridGaussianConditional::add( } /* *******************************************************************************/ -GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree() const { +GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree() + const { auto wrap = [this](const GaussianConditional::shared_ptr &gc) { // First check if conditional has not been pruned if (gc) { @@ -131,7 +135,8 @@ GaussianConditional::shared_ptr HybridGaussianConditional::operator()( } /* *******************************************************************************/ -bool HybridGaussianConditional::equals(const HybridFactor &lf, double tol) const { +bool HybridGaussianConditional::equals(const HybridFactor &lf, + double tol) const { const This *e = dynamic_cast(&lf); if (e == nullptr) return false; @@ -150,7 +155,7 @@ bool HybridGaussianConditional::equals(const HybridFactor &lf, double tol) const /* *******************************************************************************/ void HybridGaussianConditional::print(const std::string &s, - const KeyFormatter &formatter) const { + const KeyFormatter &formatter) const { std::cout << (s.empty() ? 
"" : s + "\n"); if (isContinuous()) std::cout << "Continuous "; if (isDiscrete()) std::cout << "Discrete "; @@ -193,7 +198,8 @@ KeyVector HybridGaussianConditional::continuousParents() const { } /* ************************************************************************* */ -bool HybridGaussianConditional::allFrontalsGiven(const VectorValues &given) const { +bool HybridGaussianConditional::allFrontalsGiven( + const VectorValues &given) const { for (auto &&kv : given) { if (given.find(kv.first) == given.end()) { return false; @@ -207,7 +213,8 @@ std::shared_ptr HybridGaussianConditional::likelihood( const VectorValues &given) const { if (!allFrontalsGiven(given)) { throw std::runtime_error( - "HybridGaussianConditional::likelihood: given values are missing some frontals."); + "HybridGaussianConditional::likelihood: given values are missing some " + "frontals."); } const DiscreteKeys discreteParentKeys = discreteKeys(); @@ -365,7 +372,8 @@ double HybridGaussianConditional::error(const HybridValues &values) const { } /* *******************************************************************************/ -double HybridGaussianConditional::logProbability(const HybridValues &values) const { +double HybridGaussianConditional::logProbability( + const HybridValues &values) const { auto conditional = conditionals_(values.discrete()); return conditional->logProbability(values.continuous()); } diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 87ddb2cb8..fc315c6f9 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -23,8 +23,8 @@ #include #include #include -#include #include +#include #include #include @@ -102,9 +102,9 @@ class GTSAM_EXPORT HybridGaussianConditional * discreteParents will be used as the labels in the decision tree. 
*/ HybridGaussianConditional(const KeyVector &continuousFrontals, - const KeyVector &continuousParents, - const DiscreteKeys &discreteParents, - const Conditionals &conditionals); + const KeyVector &continuousParents, + const DiscreteKeys &discreteParents, + const Conditionals &conditionals); /** * @brief Make a Gaussian Mixture from a list of Gaussian conditionals @@ -114,9 +114,10 @@ class GTSAM_EXPORT HybridGaussianConditional * @param discreteParents Discrete parents variables * @param conditionals List of conditionals */ - HybridGaussianConditional(KeyVector &&continuousFrontals, KeyVector &&continuousParents, - DiscreteKeys &&discreteParents, - std::vector &&conditionals); + HybridGaussianConditional( + KeyVector &&continuousFrontals, KeyVector &&continuousParents, + DiscreteKeys &&discreteParents, + std::vector &&conditionals); /** * @brief Make a Gaussian Mixture from a list of Gaussian conditionals @@ -277,6 +278,7 @@ std::set DiscreteKeysAsSet(const DiscreteKeys &discreteKeys); // traits template <> -struct traits : public Testable {}; +struct traits + : public Testable {}; } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 4445d875f..f7207bf81 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -30,8 +30,8 @@ namespace gtsam { /* *******************************************************************************/ HybridGaussianFactor::HybridGaussianFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const Factors &factors) + const DiscreteKeys &discreteKeys, + const Factors &factors) : Base(continuousKeys, discreteKeys), factors_(factors) {} /* *******************************************************************************/ @@ -53,7 +53,7 @@ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { /* *******************************************************************************/ void 
HybridGaussianFactor::print(const std::string &s, - const KeyFormatter &formatter) const { + const KeyFormatter &formatter) const { std::cout << (s.empty() ? "" : s + "\n"); std::cout << "HybridGaussianFactor" << std::endl; HybridFactor::print("", formatter); diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 9915e24e6..650f7a5a7 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -84,8 +84,8 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * as the mixture density. */ HybridGaussianFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const Factors &factors); + const DiscreteKeys &discreteKeys, + const Factors &factors); /** * @brief Construct a new HybridGaussianFactor object using a vector of @@ -96,10 +96,10 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * @param factors Vector of gaussian factor shared pointers. */ HybridGaussianFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const std::vector &factors) + const DiscreteKeys &discreteKeys, + const std::vector &factors) : HybridGaussianFactor(continuousKeys, discreteKeys, - Factors(discreteKeys, factors)) {} + Factors(discreteKeys, factors)) {} /// @} /// @name Testable @@ -168,7 +168,6 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { // traits template <> -struct traits : public Testable { -}; +struct traits : public Testable {}; } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 4c6546708..900ec90c7 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -23,11 +23,11 @@ #include #include #include -#include -#include #include #include #include +#include +#include #include #include #include @@ -363,7 +363,7 @@ static std::shared_ptr createHybridGaussianFactor( correct); return 
std::make_shared(continuousSeparator, - discreteSeparator, newFactors); + discreteSeparator, newFactors); } static std::pair> @@ -406,7 +406,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, continuousSeparator.empty() ? createDiscreteFactor(eliminationResults, discreteSeparator) : createHybridGaussianFactor(eliminationResults, continuousSeparator, - discreteSeparator); + discreteSeparator); // Create the HybridGaussianConditional from the conditionals HybridGaussianConditional::Conditionals conditionals( diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index e6ce4467c..911058437 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -18,9 +18,9 @@ #pragma once -#include #include #include +#include #include #include #include @@ -221,7 +221,6 @@ class GTSAM_EXPORT HybridGaussianFactorGraph /// Get the GaussianFactorGraph at a given discrete assignment. GaussianFactorGraph operator()(const DiscreteValues& assignment) const; - }; } // namespace gtsam diff --git a/gtsam/hybrid/HybridJunctionTree.h b/gtsam/hybrid/HybridJunctionTree.h index a197e6e3c..17b4649fc 100644 --- a/gtsam/hybrid/HybridJunctionTree.h +++ b/gtsam/hybrid/HybridJunctionTree.h @@ -51,11 +51,10 @@ class HybridEliminationTree; */ class GTSAM_EXPORT HybridJunctionTree : public JunctionTree { - public: typedef JunctionTree - Base; ///< Base class - typedef HybridJunctionTree This; ///< This class + Base; ///< Base class + typedef HybridJunctionTree This; ///< This class typedef std::shared_ptr shared_ptr; ///< Shared pointer to this class /** diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 7b2990cb0..f5d9cd356 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -75,7 +75,7 @@ class HybridNonlinearFactor : public HybridFactor { * normalized. 
*/ HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, - const Factors& factors, bool normalized = false) + const Factors& factors, bool normalized = false) : Base(keys, discreteKeys), factors_(factors), normalized_(normalized) {} /** @@ -96,8 +96,8 @@ class HybridNonlinearFactor : public HybridFactor { */ template HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, - const std::vector>& factors, - bool normalized = false) + const std::vector>& factors, + bool normalized = false) : Base(keys, discreteKeys), normalized_(normalized) { std::vector nonlinear_factors; KeySet continuous_keys_set(keys.begin(), keys.end()); @@ -201,13 +201,14 @@ class HybridNonlinearFactor : public HybridFactor { /// Check equality bool equals(const HybridFactor& other, double tol = 1e-9) const override { - // We attempt a dynamic cast from HybridFactor to HybridNonlinearFactor. If it - // fails, return false. + // We attempt a dynamic cast from HybridFactor to HybridNonlinearFactor. If + // it fails, return false. if (!dynamic_cast(&other)) return false; - // If the cast is successful, we'll properly construct a HybridNonlinearFactor - // object from `other` - const HybridNonlinearFactor& f(static_cast(other)); + // If the cast is successful, we'll properly construct a + // HybridNonlinearFactor object from `other` + const HybridNonlinearFactor& f( + static_cast(other)); // Ensure that this HybridNonlinearFactor and `f` have the same `factors_`. 
auto compare = [tol](const sharedFactor& a, const sharedFactor& b) { diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index 76703ad2d..78370a157 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -20,8 +20,8 @@ #include #include #include -#include #include +#include #include namespace gtsam { @@ -80,7 +80,8 @@ void HybridNonlinearFactorGraph::printErrors( gmf->errorTree(values.continuous()).print("", keyFormatter); std::cout << std::endl; } - } else if (auto gm = std::dynamic_pointer_cast(factor)) { + } else if (auto gm = std::dynamic_pointer_cast( + factor)) { if (factor == nullptr) { std::cout << "nullptr" << "\n"; diff --git a/gtsam/hybrid/HybridNonlinearISAM.h b/gtsam/hybrid/HybridNonlinearISAM.h index 3372593be..e41b4ebe1 100644 --- a/gtsam/hybrid/HybridNonlinearISAM.h +++ b/gtsam/hybrid/HybridNonlinearISAM.h @@ -19,6 +19,7 @@ #include #include + #include namespace gtsam { diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 834a067c3..686ce60ac 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -10,26 +10,26 @@ class HybridValues { gtsam::DiscreteValues discrete() const; HybridValues(); - HybridValues(const gtsam::VectorValues &cv, const gtsam::DiscreteValues &dv); + HybridValues(const gtsam::VectorValues& cv, const gtsam::DiscreteValues& dv); void print(string s = "HybridValues", const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const; bool equals(const gtsam::HybridValues& other, double tol) const; - + void insert(gtsam::Key j, int value); void insert(gtsam::Key j, const gtsam::Vector& value); - + void insert_or_assign(gtsam::Key j, const gtsam::Vector& value); void insert_or_assign(gtsam::Key j, size_t value); void insert(const gtsam::VectorValues& values); void insert(const gtsam::DiscreteValues& values); void insert(const gtsam::HybridValues& values); - + void update(const gtsam::VectorValues& 
values); void update(const gtsam::DiscreteValues& values); void update(const gtsam::HybridValues& values); - + size_t& atDiscrete(gtsam::Key j); gtsam::Vector& at(gtsam::Key j); }; @@ -42,7 +42,7 @@ virtual class HybridFactor : gtsam::Factor { bool equals(const gtsam::HybridFactor& other, double tol = 1e-9) const; // Standard interface: - double error(const gtsam::HybridValues &values) const; + double error(const gtsam::HybridValues& values) const; bool isDiscrete() const; bool isContinuous() const; bool isHybrid() const; @@ -86,11 +86,12 @@ class HybridGaussianFactor : gtsam::HybridFactor { #include class HybridGaussianConditional : gtsam::HybridFactor { - HybridGaussianConditional(const gtsam::KeyVector& continuousFrontals, - const gtsam::KeyVector& continuousParents, - const gtsam::DiscreteKeys& discreteParents, - const std::vector& - conditionalsList); + HybridGaussianConditional( + const gtsam::KeyVector& continuousFrontals, + const gtsam::KeyVector& continuousParents, + const gtsam::DiscreteKeys& discreteParents, + const std::vector& + conditionalsList); gtsam::HybridGaussianFactor* likelihood( const gtsam::VectorValues& frontals) const; @@ -139,7 +140,7 @@ class HybridBayesNet { size_t size() const; gtsam::KeySet keys() const; const gtsam::HybridConditional* at(size_t i) const; - + // Standard interface: double logProbability(const gtsam::HybridValues& values) const; double evaluate(const gtsam::HybridValues& values) const; @@ -149,7 +150,7 @@ class HybridBayesNet { const gtsam::VectorValues& measurements) const; gtsam::HybridValues optimize() const; - gtsam::HybridValues sample(const gtsam::HybridValues &given) const; + gtsam::HybridValues sample(const gtsam::HybridValues& given) const; gtsam::HybridValues sample() const; void print(string s = "HybridBayesNet\n", @@ -189,7 +190,8 @@ class HybridGaussianFactorGraph { const gtsam::HybridFactor* at(size_t i) const; void print(string s = "") const; - bool equals(const gtsam::HybridGaussianFactorGraph& fg, 
double tol = 1e-9) const; + bool equals(const gtsam::HybridGaussianFactorGraph& fg, + double tol = 1e-9) const; // evaluation double error(const gtsam::HybridValues& values) const; @@ -222,7 +224,8 @@ class HybridNonlinearFactorGraph { void push_back(gtsam::HybridFactor* factor); void push_back(gtsam::NonlinearFactor* factor); void push_back(gtsam::DiscreteFactor* factor); - gtsam::HybridGaussianFactorGraph linearize(const gtsam::Values& continuousValues) const; + gtsam::HybridGaussianFactorGraph linearize( + const gtsam::Values& continuousValues) const; bool empty() const; void remove(size_t i); @@ -231,8 +234,8 @@ class HybridNonlinearFactorGraph { const gtsam::HybridFactor* at(size_t i) const; void print(string s = "HybridNonlinearFactorGraph\n", - const gtsam::KeyFormatter& keyFormatter = - gtsam::DefaultKeyFormatter) const; + const gtsam::KeyFormatter& keyFormatter = + gtsam::DefaultKeyFormatter) const; }; #include @@ -243,18 +246,18 @@ class HybridNonlinearFactor : gtsam::HybridFactor { bool normalized = false); template - HybridNonlinearFactor(const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, - const std::vector& factors, - bool normalized = false); + HybridNonlinearFactor(const gtsam::KeyVector& keys, + const gtsam::DiscreteKeys& discreteKeys, + const std::vector& factors, + bool normalized = false); double error(const gtsam::Values& continuousValues, const gtsam::DiscreteValues& discreteValues) const; - double nonlinearFactorLogNormalizingConstant(const gtsam::NonlinearFactor* factor, - const gtsam::Values& values) const; + double nonlinearFactorLogNormalizingConstant( + const gtsam::NonlinearFactor* factor, const gtsam::Values& values) const; - HybridGaussianFactor* linearize( - const gtsam::Values& continuousValues) const; + HybridGaussianFactor* linearize(const gtsam::Values& continuousValues) const; void print(string s = "HybridNonlinearFactor\n", const gtsam::KeyFormatter& keyFormatter = diff --git a/gtsam/hybrid/tests/Switching.h 
b/gtsam/hybrid/tests/Switching.h index 0e8680aa9..f08d29fd5 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -21,8 +21,8 @@ #include #include #include -#include #include +#include #include #include #include @@ -60,9 +60,9 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( hfg.add(HybridGaussianFactor( {keyFunc(t), keyFunc(t + 1)}, {{dKeyFunc(t), 2}}, {std::make_shared(keyFunc(t), I_3x3, keyFunc(t + 1), - I_3x3, Z_3x1), + I_3x3, Z_3x1), std::make_shared(keyFunc(t), I_3x3, keyFunc(t + 1), - I_3x3, Vector3::Ones())})); + I_3x3, Vector3::Ones())})); if (t > 1) { hfg.add(DecisionTreeFactor({{dKeyFunc(t - 1), 2}, {dKeyFunc(t), 2}}, diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 299562e32..308daca9a 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -19,10 +19,10 @@ #include #include #include +#include #include #include #include -#include #include #include #include diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index f9d276760..847a38fa9 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -20,9 +20,9 @@ #include #include +#include #include #include -#include #include #include #include @@ -229,8 +229,8 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, c1 = make_shared(z, Vector1(mu1), I_1x1, model1); HybridBayesNet hbn; - hbn.emplace_shared(KeyVector{z}, KeyVector{}, - DiscreteKeys{m}, std::vector{c0, c1}); + hbn.emplace_shared( + KeyVector{z}, KeyVector{}, DiscreteKeys{m}, std::vector{c0, c1}); auto mixing = make_shared(m, "0.5/0.5"); hbn.push_back(mixing); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 3386daac8..acd7b280f 100644 --- 
a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -21,12 +21,12 @@ #include #include #include -#include -#include #include #include #include #include +#include +#include #include #include #include @@ -71,13 +71,14 @@ TEST(HybridGaussianFactorGraph, Creation) { // Define a gaussian mixture conditional P(x0|x1, c0) and add it to the factor // graph - HybridGaussianConditional gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}), - HybridGaussianConditional::Conditionals( - M(0), - std::make_shared( - X(0), Z_3x1, I_3x3, X(1), I_3x3), - std::make_shared( - X(0), Vector3::Ones(), I_3x3, X(1), I_3x3))); + HybridGaussianConditional gm( + {X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}), + HybridGaussianConditional::Conditionals( + M(0), + std::make_shared(X(0), Z_3x1, I_3x3, X(1), + I_3x3), + std::make_shared(X(0), Vector3::Ones(), I_3x3, + X(1), I_3x3))); hfg.add(gm); EXPECT_LONGS_EQUAL(2, hfg.size()); @@ -681,8 +682,8 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) { x0, -I_1x1, model0), c1 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, x0, -I_1x1, model1); - hbn.emplace_shared(KeyVector{f01}, KeyVector{x0, x1}, - DiscreteKeys{m1}, std::vector{c0, c1}); + hbn.emplace_shared( + KeyVector{f01}, KeyVector{x0, x1}, DiscreteKeys{m1}, std::vector{c0, c1}); // Discrete uniform prior. hbn.emplace_shared(m1, "0.5/0.5"); diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index 8b15b50d2..d0a4a79af 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -126,31 +126,34 @@ TEST(HybridGaussianElimination, IncrementalInference) { /********************************************************/ // Run batch elimination so we can compare results. 
- const Ordering ordering {X(0), X(1), X(2)}; + const Ordering ordering{X(0), X(1), X(2)}; // Now we calculate the expected factors using full elimination const auto [expectedHybridBayesTree, expectedRemainingGraph] = switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering); // The densities on X(0) should be the same - auto x0_conditional = - dynamic_pointer_cast(isam[X(0)]->conditional()->inner()); - auto expected_x0_conditional = dynamic_pointer_cast( - (*expectedHybridBayesTree)[X(0)]->conditional()->inner()); + auto x0_conditional = dynamic_pointer_cast( + isam[X(0)]->conditional()->inner()); + auto expected_x0_conditional = + dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(0)]->conditional()->inner()); EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional)); // The densities on X(1) should be the same - auto x1_conditional = - dynamic_pointer_cast(isam[X(1)]->conditional()->inner()); - auto expected_x1_conditional = dynamic_pointer_cast( - (*expectedHybridBayesTree)[X(1)]->conditional()->inner()); + auto x1_conditional = dynamic_pointer_cast( + isam[X(1)]->conditional()->inner()); + auto expected_x1_conditional = + dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(1)]->conditional()->inner()); EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional)); // The densities on X(2) should be the same - auto x2_conditional = - dynamic_pointer_cast(isam[X(2)]->conditional()->inner()); - auto expected_x2_conditional = dynamic_pointer_cast( - (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); + auto x2_conditional = dynamic_pointer_cast( + isam[X(2)]->conditional()->inner()); + auto expected_x2_conditional = + dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional)); // We only perform manual continuous elimination for 0,0. 
@@ -381,11 +384,11 @@ TEST(HybridGaussianISAM, NonTrivial) { // Add connecting poses similar to PoseFactors in GTD fg.emplace_shared>(X(0), Y(0), Pose2(0, 1.0, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Y(0), Z(0), Pose2(0, 1.0, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Z(0), W(0), Pose2(0, 1.0, 0), - poseNoise); + poseNoise); // Create initial estimate Values initial; @@ -414,9 +417,9 @@ TEST(HybridGaussianISAM, NonTrivial) { KeyVector contKeys = {W(0), W(1)}; auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); auto still = std::make_shared(W(0), W(1), Pose2(0, 0, 0), - noise_model), + noise_model), moving = std::make_shared(W(0), W(1), odometry, - noise_model); + noise_model); std::vector components = {moving, still}; auto mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); @@ -424,14 +427,14 @@ TEST(HybridGaussianISAM, NonTrivial) { // Add equivalent of ImuFactor fg.emplace_shared>(X(0), X(1), Pose2(1.0, 0.0, 0), - poseNoise); + poseNoise); // PoseFactors-like at k=1 fg.emplace_shared>(X(1), Y(1), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Y(1), Z(1), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Z(1), W(1), Pose2(-1, 1, 0), - poseNoise); + poseNoise); initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); @@ -454,7 +457,7 @@ TEST(HybridGaussianISAM, NonTrivial) { // Add odometry factor with discrete modes. 
contKeys = {W(1), W(2)}; still = std::make_shared(W(1), W(2), Pose2(0, 0, 0), - noise_model); + noise_model); moving = std::make_shared(W(1), W(2), odometry, noise_model); components = {moving, still}; @@ -464,14 +467,14 @@ TEST(HybridGaussianISAM, NonTrivial) { // Add equivalent of ImuFactor fg.emplace_shared>(X(1), X(2), Pose2(1.0, 0.0, 0), - poseNoise); + poseNoise); // PoseFactors-like at k=1 fg.emplace_shared>(X(2), Y(2), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Y(2), Z(2), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Z(2), W(2), Pose2(-2, 1, 0), - poseNoise); + poseNoise); initial.insert(X(2), Pose2(2.0, 0.0, 0.0)); initial.insert(Y(2), Pose2(2.0, 1.0, 0.0)); @@ -497,7 +500,7 @@ TEST(HybridGaussianISAM, NonTrivial) { // Add odometry factor with discrete modes. contKeys = {W(2), W(3)}; still = std::make_shared(W(2), W(3), Pose2(0, 0, 0), - noise_model); + noise_model); moving = std::make_shared(W(2), W(3), odometry, noise_model); components = {moving, still}; @@ -507,14 +510,14 @@ TEST(HybridGaussianISAM, NonTrivial) { // Add equivalent of ImuFactor fg.emplace_shared>(X(2), X(3), Pose2(1.0, 0.0, 0), - poseNoise); + poseNoise); // PoseFactors-like at k=3 fg.emplace_shared>(X(3), Y(3), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Y(3), Z(3), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Z(3), W(3), Pose2(-3, 1, 0), - poseNoise); + poseNoise); initial.insert(X(3), Pose2(3.0, 0.0, 0.0)); initial.insert(Y(3), Pose2(3.0, 1.0, 0.0)); diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp index 9e2794b19..71d5108a0 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp @@ -20,8 +20,8 @@ #include #include #include -#include #include +#include #include #include diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp 
b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 3f9a18653..fd3494bea 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -23,8 +23,8 @@ #include #include #include -#include #include +#include #include #include #include @@ -150,8 +150,8 @@ TEST(HybridGaussianFactorGraph, Resize) { } /*************************************************************************** - * Test that the HybridNonlinearFactor reports correctly if the number of continuous - * keys provided do not match the keys in the factors. + * Test that the HybridNonlinearFactor reports correctly if the number of + * continuous keys provided do not match the keys in the factors. */ TEST(HybridGaussianFactorGraph, HybridNonlinearFactor) { auto nonlinearFactor = std::make_shared>( @@ -350,7 +350,8 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) { EliminateHybrid(factors, ordering); auto gaussianConditionalMixture = - dynamic_pointer_cast(hybridConditionalMixture->inner()); + dynamic_pointer_cast( + hybridConditionalMixture->inner()); CHECK(gaussianConditionalMixture); // Frontals = [x0, x1] @@ -413,7 +414,8 @@ TEST(HybridFactorGraph, PrintErrors) { // fg.print(); // std::cout << "\n\n\n" << std::endl; // fg.printErrors( - // HybridValues(hv.continuous(), DiscreteValues(), self.linearizationPoint)); + // HybridValues(hv.continuous(), DiscreteValues(), + // self.linearizationPoint)); } /**************************************************************************** diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index 06a4249d3..f4054c11a 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -143,7 +143,7 @@ TEST(HybridNonlinearISAM, IncrementalInference) { /********************************************************/ // Run batch elimination so we can compare results. 
- const Ordering ordering {X(0), X(1), X(2)}; + const Ordering ordering{X(0), X(1), X(2)}; // Now we calculate the actual factors using full elimination const auto [expectedHybridBayesTree, expectedRemainingGraph] = @@ -153,22 +153,25 @@ TEST(HybridNonlinearISAM, IncrementalInference) { // The densities on X(1) should be the same auto x0_conditional = dynamic_pointer_cast( bayesTree[X(0)]->conditional()->inner()); - auto expected_x0_conditional = dynamic_pointer_cast( - (*expectedHybridBayesTree)[X(0)]->conditional()->inner()); + auto expected_x0_conditional = + dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(0)]->conditional()->inner()); EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional)); // The densities on X(1) should be the same auto x1_conditional = dynamic_pointer_cast( bayesTree[X(1)]->conditional()->inner()); - auto expected_x1_conditional = dynamic_pointer_cast( - (*expectedHybridBayesTree)[X(1)]->conditional()->inner()); + auto expected_x1_conditional = + dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(1)]->conditional()->inner()); EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional)); // The densities on X(2) should be the same auto x2_conditional = dynamic_pointer_cast( bayesTree[X(2)]->conditional()->inner()); - auto expected_x2_conditional = dynamic_pointer_cast( - (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); + auto expected_x2_conditional = + dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional)); // We only perform manual continuous elimination for 0,0. 
@@ -410,11 +413,11 @@ TEST(HybridNonlinearISAM, NonTrivial) { // Add connecting poses similar to PoseFactors in GTD fg.emplace_shared>(X(0), Y(0), Pose2(0, 1.0, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Y(0), Z(0), Pose2(0, 1.0, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Z(0), W(0), Pose2(0, 1.0, 0), - poseNoise); + poseNoise); // Create initial estimate Values initial; @@ -433,9 +436,9 @@ TEST(HybridNonlinearISAM, NonTrivial) { KeyVector contKeys = {W(0), W(1)}; auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); auto still = std::make_shared(W(0), W(1), Pose2(0, 0, 0), - noise_model), + noise_model), moving = std::make_shared(W(0), W(1), odometry, - noise_model); + noise_model); std::vector components = {moving, still}; auto mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); @@ -443,14 +446,14 @@ TEST(HybridNonlinearISAM, NonTrivial) { // Add equivalent of ImuFactor fg.emplace_shared>(X(0), X(1), Pose2(1.0, 0.0, 0), - poseNoise); + poseNoise); // PoseFactors-like at k=1 fg.emplace_shared>(X(1), Y(1), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Y(1), Z(1), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Z(1), W(1), Pose2(-1, 1, 0), - poseNoise); + poseNoise); initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); @@ -473,7 +476,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { // Add odometry factor with discrete modes. 
contKeys = {W(1), W(2)}; still = std::make_shared(W(1), W(2), Pose2(0, 0, 0), - noise_model); + noise_model); moving = std::make_shared(W(1), W(2), odometry, noise_model); components = {moving, still}; @@ -483,14 +486,14 @@ TEST(HybridNonlinearISAM, NonTrivial) { // Add equivalent of ImuFactor fg.emplace_shared>(X(1), X(2), Pose2(1.0, 0.0, 0), - poseNoise); + poseNoise); // PoseFactors-like at k=1 fg.emplace_shared>(X(2), Y(2), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Y(2), Z(2), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Z(2), W(2), Pose2(-2, 1, 0), - poseNoise); + poseNoise); initial.insert(X(2), Pose2(2.0, 0.0, 0.0)); initial.insert(Y(2), Pose2(2.0, 1.0, 0.0)); @@ -516,7 +519,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { // Add odometry factor with discrete modes. contKeys = {W(2), W(3)}; still = std::make_shared(W(2), W(3), Pose2(0, 0, 0), - noise_model); + noise_model); moving = std::make_shared(W(2), W(3), odometry, noise_model); components = {moving, still}; @@ -526,14 +529,14 @@ TEST(HybridNonlinearISAM, NonTrivial) { // Add equivalent of ImuFactor fg.emplace_shared>(X(2), X(3), Pose2(1.0, 0.0, 0), - poseNoise); + poseNoise); // PoseFactors-like at k=3 fg.emplace_shared>(X(3), Y(3), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Y(3), Z(3), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Z(3), W(3), Pose2(-3, 1, 0), - poseNoise); + poseNoise); initial.insert(X(3), Pose2(3.0, 0.0, 0.0)); initial.insert(Y(3), Pose2(3.0, 1.0, 0.0)); diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 76d919112..365dfcc9f 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -18,11 +18,11 @@ #include #include -#include -#include #include #include #include +#include +#include #include #include @@ -59,7 +59,8 @@ BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Leaf, 
BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Choice, "gtsam_HybridGaussianFactor_Factors_Choice"); -BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional, "gtsam_HybridGaussianConditional"); +BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional, + "gtsam_HybridGaussianConditional"); BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals, "gtsam_HybridGaussianConditional_Conditionals"); BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals::Leaf, @@ -115,7 +116,7 @@ TEST(HybridSerialization, HybridGaussianConditional) { const auto conditional1 = std::make_shared( GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3)); const HybridGaussianConditional gm({Z(0)}, {X(0)}, {mode}, - {conditional0, conditional1}); + {conditional0, conditional1}); EXPECT(equalsObj(gm)); EXPECT(equalsXML(gm)); diff --git a/python/gtsam/tests/test_HybridBayesNet.py b/python/gtsam/tests/test_HybridBayesNet.py index 0fd569908..6e50c2bfe 100644 --- a/python/gtsam/tests/test_HybridBayesNet.py +++ b/python/gtsam/tests/test_HybridBayesNet.py @@ -51,7 +51,7 @@ class TestHybridBayesNet(GtsamTestCase): bayesNet.push_back(conditional) bayesNet.push_back( HybridGaussianConditional([X(1)], [], discrete_keys, - [conditional0, conditional1])) + [conditional0, conditional1])) bayesNet.push_back(DiscreteConditional(Asia, "99/1")) # Create values at which to evaluate. 
diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index c7c4c6b51..f82665c49 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -27,6 +27,7 @@ DEBUG_MARGINALS = False class TestHybridGaussianFactorGraph(GtsamTestCase): """Unit tests for HybridGaussianFactorGraph.""" + def test_create(self): """Test construction of hybrid factor graph.""" model = noiseModel.Unit.Create(3) @@ -106,8 +107,9 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): I_1x1, X(0), [0], sigma=3) - bayesNet.push_back(HybridGaussianConditional([Z(i)], [X(0)], keys, - [conditional0, conditional1])) + bayesNet.push_back( + HybridGaussianConditional([Z(i)], [X(0)], keys, + [conditional0, conditional1])) # Create prior on X(0). prior_on_x0 = GaussianConditional.FromMeanAndStddev( @@ -219,9 +221,9 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): # Check ratio between unnormalized posterior and factor graph is the same for all modes: for mode in [1, 0]: values.insert_or_assign(M(0), mode) - self.assertAlmostEqual(bayesNet.evaluate(values) / - np.exp(-fg.error(values)), - 0.6366197723675815) + self.assertAlmostEqual( + bayesNet.evaluate(values) / np.exp(-fg.error(values)), + 0.6366197723675815) self.assertAlmostEqual(bayesNet.error(values), fg.error(values)) # Test elimination. 
diff --git a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py index 3c2a70f73..3a659c4e8 100644 --- a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py +++ b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py @@ -24,12 +24,14 @@ from gtsam import BetweenFactorPoint3, Point3, PriorFactorPoint3, noiseModel class TestHybridGaussianFactorGraph(GtsamTestCase): """Unit tests for HybridGaussianFactorGraph.""" + def test_nonlinear_hybrid(self): nlfg = gtsam.HybridNonlinearFactorGraph() dk = gtsam.DiscreteKeys() dk.push_back((10, 2)) - nlfg.push_back(BetweenFactorPoint3(1, 2, Point3( - 1, 2, 3), noiseModel.Diagonal.Variances([1, 1, 1]))) + nlfg.push_back( + BetweenFactorPoint3(1, 2, Point3(1, 2, 3), + noiseModel.Diagonal.Variances([1, 1, 1]))) nlfg.push_back( PriorFactorPoint3(2, Point3(1, 2, 3), noiseModel.Diagonal.Variances([0.5, 0.5, 0.5]))) diff --git a/python/gtsam/tests/test_HybridValues.py b/python/gtsam/tests/test_HybridValues.py index 8a54d518c..73c27a4cd 100644 --- a/python/gtsam/tests/test_HybridValues.py +++ b/python/gtsam/tests/test_HybridValues.py @@ -14,11 +14,12 @@ from __future__ import print_function import unittest -import gtsam import numpy as np from gtsam.symbol_shorthand import C, X from gtsam.utils.test_case import GtsamTestCase +import gtsam + class TestHybridValues(GtsamTestCase): """Unit tests for HybridValues.""" From 8da15fd3e02ce41ba2f152d3522a0a64306533a0 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 13 Sep 2024 13:40:22 -0400 Subject: [PATCH 214/398] update after merge --- gtsam/hybrid/tests/testHybridGaussianFactor.cpp | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index e1b4e46f4..0b40ecc72 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -399,17 +399,15 @@ void 
addMeasurement(HybridBayesNet& hbn, Key z_key, Key x_key, double sigma) { } /// Create hybrid motion model p(x1 | x0, m1) -static GaussianMixture::shared_ptr CreateHybridMotionModel(double mu0, - double mu1, - double sigma0, - double sigma1) { +static HybridGaussianConditional::shared_ptr CreateHybridMotionModel( + double mu0, double mu1, double sigma0, double sigma1) { auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); auto c0 = make_shared(X(1), Vector1(mu0), I_1x1, X(0), -I_1x1, model0), c1 = make_shared(X(1), Vector1(mu1), I_1x1, X(0), -I_1x1, model1); - return std::make_shared( + return std::make_shared( KeyVector{X(1)}, KeyVector{X(0)}, DiscreteKeys{m1}, std::vector{c0, c1}); } @@ -621,7 +619,7 @@ TEST(HybridGaussianFactor, TwoStateModel2) { * measurements and vastly different motion model: either stand still or move * far. This yields a very informative posterior. */ -TEST(GaussianMixtureFactor, TwoStateModel3) { +TEST(HybridGaussianFactor, TwoStateModel3) { using namespace test_two_state_estimation; double mu0 = 0.0, mu1 = 10.0; From 4343b3aadca7ecad7d40a972349d0ea3a5114d22 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 13 Sep 2024 15:20:27 -0400 Subject: [PATCH 215/398] update HybridNonlinearFactor to accept a tree of nonlinear factors and arbitrary scalars --- gtsam/hybrid/HybridNonlinearFactor.h | 64 +++++++++++-------- gtsam/hybrid/tests/Switching.h | 5 +- gtsam/hybrid/tests/testHybridBayesNet.cpp | 3 +- gtsam/hybrid/tests/testHybridEstimation.cpp | 13 ++-- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 7 +- .../tests/testHybridNonlinearFactor.cpp | 6 +- .../tests/testHybridNonlinearFactorGraph.cpp | 9 ++- .../hybrid/tests/testHybridNonlinearISAM.cpp | 7 +- 8 files changed, 67 insertions(+), 47 deletions(-) diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index f5d9cd356..2c2810f52 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ 
b/gtsam/hybrid/HybridNonlinearFactor.h @@ -53,9 +53,9 @@ class HybridNonlinearFactor : public HybridFactor { /** * @brief typedef for DecisionTree which has Keys as node labels and - * NonlinearFactor as leaf nodes. + * pairs of NonlinearFactor & an arbitrary scalar as leaf nodes. */ - using Factors = DecisionTree; + using Factors = DecisionTree>; private: /// Decision tree of Gaussian factors indexed by discrete keys. @@ -90,25 +90,27 @@ class HybridNonlinearFactor : public HybridFactor { * Will be typecast to NonlinearFactor shared pointers. * @param keys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. - * @param factors Vector of nonlinear factors. + * @param factors Vector of nonlinear factor and scalar pairs. * @param normalized Flag indicating if the factor error is already * normalized. */ template - HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, - const std::vector>& factors, - bool normalized = false) + HybridNonlinearFactor( + const KeyVector& keys, const DiscreteKeys& discreteKeys, + const std::vector, double>>& factors, + bool normalized = false) : Base(keys, discreteKeys), normalized_(normalized) { - std::vector nonlinear_factors; + std::vector> + nonlinear_factors; KeySet continuous_keys_set(keys.begin(), keys.end()); KeySet factor_keys_set; - for (auto&& f : factors) { + for (auto&& [f, val] : factors) { // Insert all factor continuous keys in the continuous keys set. 
std::copy(f->keys().begin(), f->keys().end(), std::inserter(factor_keys_set, factor_keys_set.end())); if (auto nf = std::dynamic_pointer_cast(f)) { - nonlinear_factors.push_back(nf); + nonlinear_factors.push_back(std::make_pair(nf, val)); } else { throw std::runtime_error( "Factors passed into HybridNonlinearFactor need to be nonlinear!"); @@ -133,9 +135,11 @@ class HybridNonlinearFactor : public HybridFactor { */ AlgebraicDecisionTree errorTree(const Values& continuousValues) const { // functor to convert from sharedFactor to double error value. - auto errorFunc = [continuousValues](const sharedFactor& factor) { - return factor->error(continuousValues); - }; + auto errorFunc = + [continuousValues](const std::pair& f) { + auto [factor, val] = f; + return factor->error(continuousValues) + val; + }; DecisionTree result(factors_, errorFunc); return result; } @@ -150,12 +154,10 @@ class HybridNonlinearFactor : public HybridFactor { double error(const Values& continuousValues, const DiscreteValues& discreteValues) const { // Retrieve the factor corresponding to the assignment in discreteValues. - auto factor = factors_(discreteValues); + auto [factor, val] = factors_(discreteValues); // Compute the error for the selected factor const double factorError = factor->error(continuousValues); - if (normalized_) return factorError; - return factorError + this->nonlinearFactorLogNormalizingConstant( - factor, continuousValues); + return factorError + val; } /** @@ -175,7 +177,7 @@ class HybridNonlinearFactor : public HybridFactor { */ size_t dim() const { const auto assignments = DiscreteValues::CartesianProduct(discreteKeys_); - auto factor = factors_(assignments.at(0)); + auto [factor, val] = factors_(assignments.at(0)); return factor->dim(); } @@ -189,9 +191,11 @@ class HybridNonlinearFactor : public HybridFactor { std::cout << (s.empty() ? 
"" : s + " "); Base::print("", keyFormatter); std::cout << "\nHybridNonlinearFactor\n"; - auto valueFormatter = [](const sharedFactor& v) { - if (v) { - return "Nonlinear factor on " + std::to_string(v->size()) + " keys"; + auto valueFormatter = [](const std::pair& v) { + auto [factor, val] = v; + if (factor) { + return "Nonlinear factor on " + std::to_string(factor->size()) + + " keys"; } else { return std::string("nullptr"); } @@ -211,8 +215,10 @@ class HybridNonlinearFactor : public HybridFactor { static_cast(other)); // Ensure that this HybridNonlinearFactor and `f` have the same `factors_`. - auto compare = [tol](const sharedFactor& a, const sharedFactor& b) { - return traits::Equals(*a, *b, tol); + auto compare = [tol](const std::pair& a, + const std::pair& b) { + return traits::Equals(*a.first, *b.first, tol) && + (a.second == b.second); }; if (!factors_.equals(f.factors_, compare)) return false; @@ -230,7 +236,7 @@ class HybridNonlinearFactor : public HybridFactor { GaussianFactor::shared_ptr linearize( const Values& continuousValues, const DiscreteValues& discreteValues) const { - auto factor = factors_(discreteValues); + auto [factor, val] = factors_(discreteValues); return factor->linearize(continuousValues); } @@ -238,12 +244,14 @@ class HybridNonlinearFactor : public HybridFactor { std::shared_ptr linearize( const Values& continuousValues) const { // functional to linearize each factor in the decision tree - auto linearizeDT = [continuousValues](const sharedFactor& factor) { - return factor->linearize(continuousValues); - }; + auto linearizeDT = + [continuousValues](const std::pair& f) { + auto [factor, val] = f; + return {factor->linearize(continuousValues), val}; + }; - DecisionTree linearized_factors( - factors_, linearizeDT); + DecisionTree> + linearized_factors(factors_, linearizeDT); return std::make_shared( continuousKeys_, discreteKeys_, linearized_factors); diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 
f08d29fd5..851a50698 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -159,9 +159,10 @@ struct Switching { for (size_t k = 0; k < K - 1; k++) { KeyVector keys = {X(k), X(k + 1)}; auto motion_models = motionModels(k, between_sigma); - std::vector components; + std::vector> components; for (auto &&f : motion_models) { - components.push_back(std::dynamic_pointer_cast(f)); + components.push_back( + {std::dynamic_pointer_cast(f), 0.0}); } nonlinearFactorGraph.emplace_shared( keys, DiscreteKeys{modes[k]}, components); diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index b35468302..bf3301d2f 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -387,7 +387,8 @@ TEST(HybridBayesNet, Sampling) { std::make_shared>(X(0), X(1), 0, noise_model); auto one_motion = std::make_shared>(X(0), X(1), 1, noise_model); - std::vector factors = {zero_motion, one_motion}; + std::vector> factors = { + {zero_motion, 0.0}, {one_motion, 0.0}}; nfg.emplace_shared>(X(0), 0.0, noise_model); nfg.emplace_shared( KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, factors); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 308daca9a..9e64b73da 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -435,9 +435,10 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() { std::make_shared>(X(0), X(1), 0, noise_model); const auto one_motion = std::make_shared>(X(0), X(1), 1, noise_model); - nfg.emplace_shared( - KeyVector{X(0), X(1)}, DiscreteKeys{m}, - std::vector{zero_motion, one_motion}); + std::vector> components = { + {zero_motion, 0.0}, {one_motion, 0.0}}; + nfg.emplace_shared(KeyVector{X(0), X(1)}, + DiscreteKeys{m}, components); return nfg; } @@ -589,7 +590,8 @@ TEST(HybridEstimation, ModeSelection) { model1 = 
std::make_shared( X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight)); - std::vector components = {model0, model1}; + std::vector> components = { + {model0, 0.0}, {model1, 0.0}}; KeyVector keys = {X(0), X(1)}; HybridNonlinearFactor mf(keys, modes, components); @@ -680,7 +682,8 @@ TEST(HybridEstimation, ModeSelection2) { model1 = std::make_shared>( X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight)); - std::vector components = {model0, model1}; + std::vector> components = { + {model0, 0.0}, {model1, 0.0}}; KeyVector keys = {X(0), X(1)}; HybridNonlinearFactor mf(keys, modes, components); diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index d0a4a79af..cf983254a 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -420,7 +420,8 @@ TEST(HybridGaussianISAM, NonTrivial) { noise_model), moving = std::make_shared(W(0), W(1), odometry, noise_model); - std::vector components = {moving, still}; + std::vector> components = { + {moving, 0.0}, {still, 0.0}}; auto mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); fg.push_back(mixtureFactor); @@ -460,7 +461,7 @@ TEST(HybridGaussianISAM, NonTrivial) { noise_model); moving = std::make_shared(W(1), W(2), odometry, noise_model); - components = {moving, still}; + components = {{moving, 0.0}, {still, 0.0}}; mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); fg.push_back(mixtureFactor); @@ -503,7 +504,7 @@ TEST(HybridGaussianISAM, NonTrivial) { noise_model); moving = std::make_shared(W(2), W(3), odometry, noise_model); - components = {moving, still}; + components = {{moving, 0.0}, {still, 0.0}}; mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); fg.push_back(mixtureFactor); diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp 
b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp index 71d5108a0..fa31fbe7d 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp @@ -58,7 +58,8 @@ TEST(HybridNonlinearFactor, Printing) { std::make_shared>(X(1), X(2), between0, model); auto f1 = std::make_shared>(X(1), X(2), between1, model); - std::vector factors{f0, f1}; + std::vector> factors{ + {f0, 0.0}, {f1, 0.0}}; HybridNonlinearFactor mixtureFactor({X(1), X(2)}, {m1}, factors); @@ -86,7 +87,8 @@ static HybridNonlinearFactor getHybridNonlinearFactor() { std::make_shared>(X(1), X(2), between0, model); auto f1 = std::make_shared>(X(1), X(2), between1, model); - std::vector factors{f0, f1}; + std::vector> factors{ + {f0, 0.0}, {f1, 0.0}}; return HybridNonlinearFactor({X(1), X(2)}, {m1}, factors); } diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index fd3494bea..947c24677 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -131,7 +131,8 @@ TEST(HybridGaussianFactorGraph, Resize) { auto still = std::make_shared(X(0), X(1), 0.0, noise_model), moving = std::make_shared(X(0), X(1), 1.0, noise_model); - std::vector components = {still, moving}; + std::vector> components = { + {still, 0.0}, {moving, 0.0}}; auto dcFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); nhfg.push_back(dcFactor); @@ -162,7 +163,8 @@ TEST(HybridGaussianFactorGraph, HybridNonlinearFactor) { auto still = std::make_shared(X(0), X(1), 0.0, noise_model), moving = std::make_shared(X(0), X(1), 1.0, noise_model); - std::vector components = {still, moving}; + std::vector> components = { + {still, 0.0}, {moving, 0.0}}; // Check for exception when number of continuous keys are under-specified. 
KeyVector contKeys = {X(0)}; @@ -801,7 +803,8 @@ TEST(HybridFactorGraph, DefaultDecisionTree) { noise_model), moving = std::make_shared(X(0), X(1), odometry, noise_model); - std::vector motion_models = {still, moving}; + std::vector> motion_models = + {{still, 0.0}, {moving, 0.0}}; fg.emplace_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, motion_models); diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index f4054c11a..7519162eb 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -439,7 +439,8 @@ TEST(HybridNonlinearISAM, NonTrivial) { noise_model), moving = std::make_shared(W(0), W(1), odometry, noise_model); - std::vector components = {moving, still}; + std::vector> components = { + {moving, 0.0}, {still, 0.0}}; auto mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); fg.push_back(mixtureFactor); @@ -479,7 +480,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { noise_model); moving = std::make_shared(W(1), W(2), odometry, noise_model); - components = {moving, still}; + components = {{moving, 0.0}, {still, 0.0}}; mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); fg.push_back(mixtureFactor); @@ -522,7 +523,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { noise_model); moving = std::make_shared(W(2), W(3), odometry, noise_model); - components = {moving, still}; + components = {{moving, 0.0}, {still, 0.0}}; mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); fg.push_back(mixtureFactor); From 0bbf16bf4b9677f4153d253a8d5b59f470a7d25e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 13 Sep 2024 17:37:29 -0400 Subject: [PATCH 216/398] HybridGaussianFactor tests passing --- gtsam/hybrid/HybridGaussianConditional.cpp | 17 ++++------ gtsam/hybrid/HybridGaussianFactor.cpp | 30 ++++++++++------ 
gtsam/hybrid/HybridGaussianFactor.h | 19 ++++++----- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 34 +++++++++++-------- gtsam/hybrid/HybridNonlinearFactor.h | 9 ++--- .../hybrid/tests/testHybridGaussianFactor.cpp | 14 +++++--- 6 files changed, 71 insertions(+), 52 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 42e702155..093d50fbf 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -220,22 +220,19 @@ std::shared_ptr HybridGaussianConditional::likelihood( const DiscreteKeys discreteParentKeys = discreteKeys(); const KeyVector continuousParentKeys = continuousParents(); const HybridGaussianFactor::Factors likelihoods( - conditionals_, [&](const GaussianConditional::shared_ptr &conditional) { - const auto likelihood_m = conditional->likelihood(given); + conditionals_, + [&](const GaussianConditional::shared_ptr &conditional) + -> std::pair { + auto likelihood_m = conditional->likelihood(given); const double Cgm_Kgcm = logConstant_ - conditional->logNormalizationConstant(); if (Cgm_Kgcm == 0.0) { - return likelihood_m; + return {likelihood_m, 0.0}; } else { // Add a constant factor to the likelihood in case the noise models // are not all equal. 
- GaussianFactorGraph gfg; - gfg.push_back(likelihood_m); - Vector c(1); - c << std::sqrt(2.0 * Cgm_Kgcm); - auto constantFactor = std::make_shared(c); - gfg.push_back(constantFactor); - return std::make_shared(gfg); + double c = std::sqrt(2.0 * Cgm_Kgcm); + return {likelihood_m, c}; } }); return std::make_shared( diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index f7207bf81..6fb453e75 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -46,8 +46,10 @@ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { // Check the base and the factors: return Base::equals(*e, tol) && factors_.equals(e->factors_, - [tol](const sharedFactor &f1, const sharedFactor &f2) { - return f1->equals(*f2, tol); + [tol](const std::pair &f1, + const std::pair &f2) { + return f1.first->equals(*f2.first, tol) && + (f1.second == f2.second); }); } @@ -63,11 +65,13 @@ void HybridGaussianFactor::print(const std::string &s, } else { factors_.print( "", [&](Key k) { return formatter(k); }, - [&](const sharedFactor &gf) -> std::string { + [&](const std::pair &gfv) -> std::string { + auto [gf, val] = gfv; RedirectCout rd; std::cout << ":\n"; if (gf) { gf->print("", formatter); + std::cout << "value: " << val << std::endl; return rd.str(); } else { return "nullptr"; @@ -78,8 +82,8 @@ void HybridGaussianFactor::print(const std::string &s, } /* *******************************************************************************/ -HybridGaussianFactor::sharedFactor HybridGaussianFactor::operator()( - const DiscreteValues &assignment) const { +std::pair +HybridGaussianFactor::operator()(const DiscreteValues &assignment) const { return factors_(assignment); } @@ -99,7 +103,9 @@ GaussianFactorGraphTree HybridGaussianFactor::add( /* *******************************************************************************/ GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree() const { - auto 
wrap = [](const sharedFactor &gf) { return GaussianFactorGraph{gf}; }; + auto wrap = [](const std::pair &gfv) { + return GaussianFactorGraph{gfv.first}; + }; return {factors_, wrap}; } @@ -107,17 +113,19 @@ GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree() AlgebraicDecisionTree HybridGaussianFactor::errorTree( const VectorValues &continuousValues) const { // functor to convert from sharedFactor to double error value. - auto errorFunc = [&continuousValues](const sharedFactor &gf) { - return gf->error(continuousValues); - }; + auto errorFunc = + [&continuousValues](const std::pair &gfv) { + auto [gf, val] = gfv; + return gf->error(continuousValues) + val; + }; DecisionTree error_tree(factors_, errorFunc); return error_tree; } /* *******************************************************************************/ double HybridGaussianFactor::error(const HybridValues &values) const { - const sharedFactor gf = factors_(values.discrete()); - return gf->error(values.continuous()); + auto &&[gf, val] = factors_(values.discrete()); + return gf->error(values.continuous()) + val; } } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 650f7a5a7..30c35dd2b 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -53,7 +53,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { using sharedFactor = std::shared_ptr; /// typedef for Decision Tree of Gaussian factors and log-constant. - using Factors = DecisionTree; + using Factors = DecisionTree>; private: /// Decision tree of Gaussian factors indexed by discrete keys. @@ -80,8 +80,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * @param continuousKeys A vector of keys representing continuous variables. * @param discreteKeys A vector of keys representing discrete variables and * their cardinalities. 
- * @param factors The decision tree of Gaussian factors stored - * as the mixture density. + * @param factors The decision tree of Gaussian factors and arbitrary scalars. */ HybridGaussianFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, @@ -93,11 +92,12 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. - * @param factors Vector of gaussian factor shared pointers. + * @param factors Vector of gaussian factor shared pointers + * and arbitrary scalars. */ - HybridGaussianFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const std::vector &factors) + HybridGaussianFactor( + const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, + const std::vector> &factors) : HybridGaussianFactor(continuousKeys, discreteKeys, Factors(discreteKeys, factors)) {} @@ -114,8 +114,9 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// @name Standard API /// @{ - /// Get factor at a given discrete assignment. - sharedFactor operator()(const DiscreteValues &assignment) const; + /// Get the factor and scalar at a given discrete assignment. 
+ std::pair operator()( + const DiscreteValues &assignment) const; /** * @brief Combine the Gaussian Factor Graphs in `sum` and `this` while diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 900ec90c7..776f559a4 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -92,13 +92,15 @@ void HybridGaussianFactorGraph::printErrors( // Clear the stringstream ss.str(std::string()); - if (auto gmf = std::dynamic_pointer_cast(factor)) { + if (auto hgf = std::dynamic_pointer_cast(factor)) { if (factor == nullptr) { std::cout << "nullptr" << "\n"; } else { - gmf->operator()(values.discrete())->print(ss.str(), keyFormatter); - std::cout << "error = " << gmf->error(values) << std::endl; + auto [factor, val] = hgf->operator()(values.discrete()); + factor->print(ss.str(), keyFormatter); + std::cout << "value: " << val << std::endl; + std::cout << "error = " << factor->error(values) << std::endl; } } else if (auto hc = std::dynamic_pointer_cast(factor)) { if (factor == nullptr) { @@ -262,9 +264,12 @@ discreteElimination(const HybridGaussianFactorGraph &factors, // Case where we have a HybridGaussianFactor with no continuous keys. // In this case, compute discrete probabilities. 
auto logProbability = - [&](const GaussianFactor::shared_ptr &factor) -> double { - if (!factor) return 0.0; - return -factor->error(VectorValues()); + [&](const std::pair &fv) + -> double { + auto [factor, val] = fv; + double v = 0.5 * val * val; + if (!factor) return -v; + return -(factor->error(VectorValues()) + v); }; AlgebraicDecisionTree logProbabilities = DecisionTree(gmf->factors(), logProbability); @@ -348,7 +353,8 @@ static std::shared_ptr createHybridGaussianFactor( const KeyVector &continuousSeparator, const DiscreteKeys &discreteSeparator) { // Correct for the normalization constant used up by the conditional - auto correct = [&](const Result &pair) -> GaussianFactor::shared_ptr { + auto correct = + [&](const Result &pair) -> std::pair { const auto &[conditional, factor] = pair; if (factor) { auto hf = std::dynamic_pointer_cast(factor); @@ -357,10 +363,10 @@ static std::shared_ptr createHybridGaussianFactor( // as per the Hessian definition hf->constantTerm() += 2.0 * conditional->logNormalizationConstant(); } - return factor; + return {factor, 0.0}; }; - DecisionTree newFactors(eliminationResults, - correct); + DecisionTree> newFactors( + eliminationResults, correct); return std::make_shared(continuousSeparator, discreteSeparator, newFactors); @@ -597,10 +603,10 @@ GaussianFactorGraph HybridGaussianFactorGraph::operator()( gfg.push_back(gf); } else if (auto gc = std::dynamic_pointer_cast(f)) { gfg.push_back(gf); - } else if (auto gmf = std::dynamic_pointer_cast(f)) { - gfg.push_back((*gmf)(assignment)); - } else if (auto gm = dynamic_pointer_cast(f)) { - gfg.push_back((*gm)(assignment)); + } else if (auto hgf = std::dynamic_pointer_cast(f)) { + gfg.push_back((*hgf)(assignment).first); + } else if (auto hgc = dynamic_pointer_cast(f)) { + gfg.push_back((*hgc)(assignment)); } else { continue; } diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 2c2810f52..6a6055bd8 100644 --- 
a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -245,10 +245,11 @@ class HybridNonlinearFactor : public HybridFactor { const Values& continuousValues) const { // functional to linearize each factor in the decision tree auto linearizeDT = - [continuousValues](const std::pair& f) { - auto [factor, val] = f; - return {factor->linearize(continuousValues), val}; - }; + [continuousValues](const std::pair& f) + -> std::pair { + auto [factor, val] = f; + return {factor->linearize(continuousValues), val}; + }; DecisionTree> linearized_factors(factors_, linearizeDT); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 0b40ecc72..7e3e2defa 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -71,8 +71,10 @@ TEST(HybridGaussianFactor, Sum) { auto f20 = std::make_shared(X(1), A1, X(3), A3, b); auto f21 = std::make_shared(X(1), A1, X(3), A3, b); auto f22 = std::make_shared(X(1), A1, X(3), A3, b); - std::vector factorsA{f10, f11}; - std::vector factorsB{f20, f21, f22}; + std::vector> factorsA{ + {f10, 0.0}, {f11, 0.0}}; + std::vector> factorsB{ + {f20, 0.0}, {f21, 0.0}, {f22, 0.0}}; // TODO(Frank): why specify keys at all? And: keys in factor should be *all* // keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey? 
@@ -109,7 +111,8 @@ TEST(HybridGaussianFactor, Printing) { auto b = Matrix::Zero(2, 1); auto f10 = std::make_shared(X(1), A1, X(2), A2, b); auto f11 = std::make_shared(X(1), A1, X(2), A2, b); - std::vector factors{f10, f11}; + std::vector> factors{ + {f10, 0.0}, {f11, 0.0}}; HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors); @@ -128,6 +131,7 @@ Hybrid [x1 x2; 1]{ ] b = [ 0 0 ] No noise model +value: 0 1 Leaf : A[x1] = [ @@ -140,6 +144,7 @@ Hybrid [x1 x2; 1]{ ] b = [ 0 0 ] No noise model +value: 0 } )"; @@ -178,7 +183,8 @@ TEST(HybridGaussianFactor, Error) { auto f0 = std::make_shared(X(1), A01, X(2), A02, b); auto f1 = std::make_shared(X(1), A11, X(2), A12, b); - std::vector factors{f0, f1}; + std::vector> factors{{f0, 0.0}, + {f1, 0.0}}; HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors); From f3b920257d6177b8a8975b54e10081e2d97b8cb0 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 14 Sep 2024 13:52:56 -0400 Subject: [PATCH 217/398] update tests --- gtsam/hybrid/tests/Switching.h | 11 ++-- gtsam/hybrid/tests/testHybridEstimation.cpp | 10 +-- gtsam/hybrid/tests/testHybridFactorGraph.cpp | 6 +- .../tests/testHybridGaussianConditional.cpp | 2 +- .../hybrid/tests/testHybridGaussianFactor.cpp | 11 ++-- .../tests/testHybridGaussianFactorGraph.cpp | 62 +++++++++---------- .../tests/testHybridNonlinearFactorGraph.cpp | 8 +++ .../hybrid/tests/testSerializationHybrid.cpp | 2 +- 8 files changed, 61 insertions(+), 51 deletions(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 851a50698..f799168f2 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -57,12 +57,15 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( // keyFunc(1) to keyFunc(n+1) for (size_t t = 1; t < n; t++) { - hfg.add(HybridGaussianFactor( - {keyFunc(t), keyFunc(t + 1)}, {{dKeyFunc(t), 2}}, + std::vector components = { {std::make_shared(keyFunc(t), I_3x3, keyFunc(t + 1), I_3x3, Z_3x1), - 
std::make_shared(keyFunc(t), I_3x3, keyFunc(t + 1), - I_3x3, Vector3::Ones())})); + 0.0}, + {std::make_shared(keyFunc(t), I_3x3, keyFunc(t + 1), + I_3x3, Vector3::Ones()), + 0.0}}; + hfg.add(HybridGaussianFactor({keyFunc(t), keyFunc(t + 1)}, + {{dKeyFunc(t), 2}}, components)); if (t > 1) { hfg.add(DecisionTreeFactor({{dKeyFunc(t - 1), 2}, {dKeyFunc(t), 2}}, diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 9e64b73da..f7e1a6cd6 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -542,8 +542,10 @@ std::shared_ptr mixedVarianceFactor( double logNormalizationConstant = log(1.0 / noise_tight); double logConstant = -0.5 * d * log2pi + logNormalizationConstant; - auto func = [&](const Assignment& assignment, - const GaussianFactor::shared_ptr& gf) { + auto func = + [&](const Assignment& assignment, + const GaussianFactorValuePair& gfv) -> GaussianFactorValuePair { + auto [gf, val] = gfv; if (assignment.at(mode) != tight_index) { double factor_log_constant = -0.5 * d * log2pi + log(1.0 / noise_loose); @@ -555,9 +557,9 @@ std::shared_ptr mixedVarianceFactor( } _gfg.emplace_shared(c); - return std::make_shared(_gfg); + return {std::make_shared(_gfg), 0.0}; } else { - return dynamic_pointer_cast(gf); + return {dynamic_pointer_cast(gf), 0.0}; } }; auto updated_components = gmf->factors().apply(func); diff --git a/gtsam/hybrid/tests/testHybridFactorGraph.cpp b/gtsam/hybrid/tests/testHybridFactorGraph.cpp index bc0ef91d7..8a2e3be3c 100644 --- a/gtsam/hybrid/tests/testHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridFactorGraph.cpp @@ -52,9 +52,9 @@ TEST(HybridFactorGraph, Keys) { // Add a gaussian mixture factor ϕ(x1, c1) DiscreteKey m1(M(1), 2); - DecisionTree dt( - M(1), std::make_shared(X(1), I_3x3, Z_3x1), - std::make_shared(X(1), I_3x3, Vector3::Ones())); + DecisionTree dt( + M(1), {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, + {std::make_shared(X(1), 
I_3x3, Vector3::Ones()), 0.0}); hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt)); KeySet expected_continuous{X(0), X(1)}; diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index 6156fee96..cfa7e5a5a 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -183,7 +183,7 @@ TEST(HybridGaussianConditional, Likelihood2) { // Check the detailed JacobianFactor calculation for mode==1. { // We have a JacobianFactor - const auto gf1 = (*likelihood)(assignment1); + const auto gf1 = (*likelihood)(assignment1).first; const auto jf1 = std::dynamic_pointer_cast(gf1); CHECK(jf1); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 7e3e2defa..6e41792aa 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -71,9 +71,8 @@ TEST(HybridGaussianFactor, Sum) { auto f20 = std::make_shared(X(1), A1, X(3), A3, b); auto f21 = std::make_shared(X(1), A1, X(3), A3, b); auto f22 = std::make_shared(X(1), A1, X(3), A3, b); - std::vector> factorsA{ - {f10, 0.0}, {f11, 0.0}}; - std::vector> factorsB{ + std::vector factorsA{{f10, 0.0}, {f11, 0.0}}; + std::vector factorsB{ {f20, 0.0}, {f21, 0.0}, {f22, 0.0}}; // TODO(Frank): why specify keys at all? 
And: keys in factor should be *all* @@ -111,8 +110,7 @@ TEST(HybridGaussianFactor, Printing) { auto b = Matrix::Zero(2, 1); auto f10 = std::make_shared(X(1), A1, X(2), A2, b); auto f11 = std::make_shared(X(1), A1, X(2), A2, b); - std::vector> factors{ - {f10, 0.0}, {f11, 0.0}}; + std::vector factors{{f10, 0.0}, {f11, 0.0}}; HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors); @@ -183,8 +181,7 @@ TEST(HybridGaussianFactor, Error) { auto f0 = std::make_shared(X(1), A01, X(2), A02, b); auto f1 = std::make_shared(X(1), A11, X(2), A12, b); - std::vector> factors{{f0, 0.0}, - {f1, 0.0}}; + std::vector factors{{f0, 0.0}, {f1, 0.0}}; HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index acd7b280f..69bc0dd58 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -127,9 +127,9 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { // Add a gaussian mixture factor ϕ(x1, c1) DiscreteKey m1(M(1), 2); - DecisionTree dt( - M(1), std::make_shared(X(1), I_3x3, Z_3x1), - std::make_shared(X(1), I_3x3, Vector3::Ones())); + DecisionTree dt( + M(1), {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, + {std::make_shared(X(1), I_3x3, Vector3::Ones()), 0.0}); hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt)); auto result = hfg.eliminateSequential(); @@ -153,9 +153,9 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) { // Add factor between x0 and x1 hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - std::vector factors = { - std::make_shared(X(1), I_3x3, Z_3x1), - std::make_shared(X(1), I_3x3, Vector3::Ones())}; + std::vector factors = { + {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, + {std::make_shared(X(1), I_3x3, Vector3::Ones()), 0.0}}; hfg.add(HybridGaussianFactor({X(1)}, {m1}, factors)); // Discrete probability table for c1 @@ 
-178,10 +178,10 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - hfg.add(HybridGaussianFactor( - {X(1)}, {{M(1), 2}}, - {std::make_shared(X(1), I_3x3, Z_3x1), - std::make_shared(X(1), I_3x3, Vector3::Ones())})); + std::vector factors = { + {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, + {std::make_shared(X(1), I_3x3, Vector3::Ones()), 0.0}}; + hfg.add(HybridGaussianFactor({X(1)}, {{M(1), 2}}, factors)); hfg.add(DecisionTreeFactor(m1, {2, 8})); // TODO(Varun) Adding extra discrete variable not connected to continuous @@ -208,9 +208,9 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) { hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); // Decision tree with different modes on x1 - DecisionTree dt( - M(1), std::make_shared(X(1), I_3x3, Z_3x1), - std::make_shared(X(1), I_3x3, Vector3::Ones())); + DecisionTree dt( + M(1), {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, + {std::make_shared(X(1), I_3x3, Vector3::Ones()), 0.0}); // Hybrid factor P(x1|c1) hfg.add(HybridGaussianFactor({X(1)}, {m}, dt)); @@ -238,14 +238,14 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1)); { - hfg.add(HybridGaussianFactor( - {X(0)}, {{M(0), 2}}, - {std::make_shared(X(0), I_3x3, Z_3x1), - std::make_shared(X(0), I_3x3, Vector3::Ones())})); + std::vector factors = { + {std::make_shared(X(0), I_3x3, Z_3x1), 0.0}, + {std::make_shared(X(0), I_3x3, Vector3::Ones()), 0.0}}; + hfg.add(HybridGaussianFactor({X(0)}, {{M(0), 2}}, factors)); - DecisionTree dt1( - M(1), std::make_shared(X(2), I_3x3, Z_3x1), - std::make_shared(X(2), I_3x3, Vector3::Ones())); + DecisionTree dt1( + M(1), {std::make_shared(X(2), I_3x3, Z_3x1), 0.0}, + {std::make_shared(X(2), I_3x3, Vector3::Ones()), 0.0}); hfg.add(HybridGaussianFactor({X(2)}, {{M(1), 2}}, dt1)); } @@ -256,15 +256,15 @@ 
TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1)); { - DecisionTree dt( - M(3), std::make_shared(X(3), I_3x3, Z_3x1), - std::make_shared(X(3), I_3x3, Vector3::Ones())); + DecisionTree dt( + M(3), {std::make_shared(X(3), I_3x3, Z_3x1), 0.0}, + {std::make_shared(X(3), I_3x3, Vector3::Ones()), 0.0}); hfg.add(HybridGaussianFactor({X(3)}, {{M(3), 2}}, dt)); - DecisionTree dt1( - M(2), std::make_shared(X(5), I_3x3, Z_3x1), - std::make_shared(X(5), I_3x3, Vector3::Ones())); + DecisionTree dt1( + M(2), {std::make_shared(X(5), I_3x3, Z_3x1), 0.0}, + {std::make_shared(X(5), I_3x3, Vector3::Ones()), 0.0}); hfg.add(HybridGaussianFactor({X(5)}, {{M(2), 2}}, dt1)); } @@ -552,9 +552,9 @@ TEST(HybridGaussianFactorGraph, optimize) { hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - DecisionTree dt( - C(1), std::make_shared(X(1), I_3x3, Z_3x1), - std::make_shared(X(1), I_3x3, Vector3::Ones())); + DecisionTree dt( + C(1), {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, + {std::make_shared(X(1), I_3x3, Vector3::Ones()), 0.0}); hfg.add(HybridGaussianFactor({X(1)}, {c1}, dt)); @@ -734,8 +734,8 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) { // Expected decision tree with two factor graphs: // f(x0;mode=0)P(x0) and f(x0;mode=1)P(x0) GaussianFactorGraphTree expected{ - M(0), GaussianFactorGraph(std::vector{(*mixture)(d0), prior}), - GaussianFactorGraph(std::vector{(*mixture)(d1), prior})}; + M(0), GaussianFactorGraph(std::vector{(*mixture)(d0).first, prior}), + GaussianFactorGraph(std::vector{(*mixture)(d1).first, prior})}; EXPECT(assert_equal(expected(d0), actual(d0), 1e-5)); EXPECT(assert_equal(expected(d1), actual(d1), 1e-5)); diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 947c24677..4f1a6fa46 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ 
b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -526,6 +526,7 @@ Hybrid [x0 x1; m0]{ ] b = [ -1 ] No noise model +value: 0 1 Leaf : A[x0] = [ @@ -536,6 +537,7 @@ Hybrid [x0 x1; m0]{ ] b = [ -0 ] No noise model +value: 0 } factor 2: @@ -551,6 +553,7 @@ Hybrid [x1 x2; m1]{ ] b = [ -1 ] No noise model +value: 0 1 Leaf : A[x1] = [ @@ -561,6 +564,7 @@ Hybrid [x1 x2; m1]{ ] b = [ -0 ] No noise model +value: 0 } factor 3: @@ -609,6 +613,7 @@ Hybrid [x0 x1; m0]{ ] b = [ -1 ] No noise model +value: 0 1 Leaf: A[x0] = [ @@ -619,6 +624,7 @@ Hybrid [x0 x1; m0]{ ] b = [ -0 ] No noise model +value: 0 } factor 2: @@ -633,6 +639,7 @@ Hybrid [x1 x2; m1]{ ] b = [ -1 ] No noise model +value: 0 1 Leaf: A[x1] = [ @@ -643,6 +650,7 @@ Hybrid [x1 x2; m1]{ ] b = [ -0 ] No noise model +value: 0 } factor 3: diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 365dfcc9f..74cfa72e6 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -83,7 +83,7 @@ TEST(HybridSerialization, HybridGaussianFactor) { auto b1 = Matrix::Ones(2, 1); auto f0 = std::make_shared(X(0), A, b0); auto f1 = std::make_shared(X(0), A, b1); - std::vector factors{f0, f1}; + std::vector factors{{f0, 0.0}, {f1, 0.0}}; const HybridGaussianFactor factor(continuousKeys, discreteKeys, factors); From 5ceda1e157c96f7347902a0a0b53c4eaa0fd1010 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 14 Sep 2024 13:53:31 -0400 Subject: [PATCH 218/398] update HybridGaussianFactor to allow for tree of pairs --- gtsam/hybrid/HybridGaussianConditional.cpp | 13 +++++++---- gtsam/hybrid/HybridGaussianFactor.cpp | 27 ++++++++++------------ gtsam/hybrid/HybridGaussianFactor.h | 14 ++++++----- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 11 ++++----- gtsam/hybrid/HybridNonlinearFactor.h | 2 +- 5 files changed, 34 insertions(+), 33 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp 
b/gtsam/hybrid/HybridGaussianConditional.cpp index 093d50fbf..29e1434b1 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -222,8 +222,8 @@ std::shared_ptr HybridGaussianConditional::likelihood( const HybridGaussianFactor::Factors likelihoods( conditionals_, [&](const GaussianConditional::shared_ptr &conditional) - -> std::pair { - auto likelihood_m = conditional->likelihood(given); + -> GaussianFactorValuePair { + const auto likelihood_m = conditional->likelihood(given); const double Cgm_Kgcm = logConstant_ - conditional->logNormalizationConstant(); if (Cgm_Kgcm == 0.0) { @@ -231,8 +231,13 @@ std::shared_ptr HybridGaussianConditional::likelihood( } else { // Add a constant factor to the likelihood in case the noise models // are not all equal. - double c = std::sqrt(2.0 * Cgm_Kgcm); - return {likelihood_m, c}; + GaussianFactorGraph gfg; + gfg.push_back(likelihood_m); + Vector c(1); + c << std::sqrt(2.0 * Cgm_Kgcm); + auto constantFactor = std::make_shared(c); + gfg.push_back(constantFactor); + return {std::make_shared(gfg), 0.0}; } }); return std::make_shared( diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 6fb453e75..f8d85a253 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -45,12 +45,10 @@ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { // Check the base and the factors: return Base::equals(*e, tol) && - factors_.equals(e->factors_, - [tol](const std::pair &f1, - const std::pair &f2) { - return f1.first->equals(*f2.first, tol) && - (f1.second == f2.second); - }); + factors_.equals(e->factors_, [tol](const GaussianFactorValuePair &f1, + const GaussianFactorValuePair &f2) { + return f1.first->equals(*f2.first, tol) && (f1.second == f2.second); + }); } /* *******************************************************************************/ @@ -65,7 +63,7 @@ void 
HybridGaussianFactor::print(const std::string &s, } else { factors_.print( "", [&](Key k) { return formatter(k); }, - [&](const std::pair &gfv) -> std::string { + [&](const GaussianFactorValuePair &gfv) -> std::string { auto [gf, val] = gfv; RedirectCout rd; std::cout << ":\n"; @@ -82,8 +80,8 @@ void HybridGaussianFactor::print(const std::string &s, } /* *******************************************************************************/ -std::pair -HybridGaussianFactor::operator()(const DiscreteValues &assignment) const { +GaussianFactorValuePair HybridGaussianFactor::operator()( + const DiscreteValues &assignment) const { return factors_(assignment); } @@ -103,7 +101,7 @@ GaussianFactorGraphTree HybridGaussianFactor::add( /* *******************************************************************************/ GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree() const { - auto wrap = [](const std::pair &gfv) { + auto wrap = [](const GaussianFactorValuePair &gfv) { return GaussianFactorGraph{gfv.first}; }; return {factors_, wrap}; @@ -113,11 +111,10 @@ GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree() AlgebraicDecisionTree HybridGaussianFactor::errorTree( const VectorValues &continuousValues) const { // functor to convert from sharedFactor to double error value. 
- auto errorFunc = - [&continuousValues](const std::pair &gfv) { - auto [gf, val] = gfv; - return gf->error(continuousValues) + val; - }; + auto errorFunc = [&continuousValues](const GaussianFactorValuePair &gfv) { + auto [gf, v] = gfv; + return gf->error(continuousValues) + (0.5 * v * v); + }; DecisionTree error_tree(factors_, errorFunc); return error_tree; } diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 30c35dd2b..f8a904539 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -33,6 +33,9 @@ class HybridValues; class DiscreteValues; class VectorValues; +/// Alias for pair of GaussianFactor::shared_pointer and a double value. +using GaussianFactorValuePair = std::pair; + /** * @brief Implementation of a discrete conditional mixture factor. * Implements a joint discrete-continuous factor where the discrete variable @@ -53,7 +56,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { using sharedFactor = std::shared_ptr; /// typedef for Decision Tree of Gaussian factors and log-constant. - using Factors = DecisionTree>; + using Factors = DecisionTree; private: /// Decision tree of Gaussian factors indexed by discrete keys. @@ -95,9 +98,9 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * @param factors Vector of gaussian factor shared pointers * and arbitrary scalars. */ - HybridGaussianFactor( - const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const std::vector> &factors) + HybridGaussianFactor(const KeyVector &continuousKeys, + const DiscreteKeys &discreteKeys, + const std::vector &factors) : HybridGaussianFactor(continuousKeys, discreteKeys, Factors(discreteKeys, factors)) {} @@ -115,8 +118,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// @{ /// Get the factor and scalar at a given discrete assignment. 
- std::pair operator()( - const DiscreteValues &assignment) const; + GaussianFactorValuePair operator()(const DiscreteValues &assignment) const; /** * @brief Combine the Gaussian Factor Graphs in `sum` and `this` while diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 776f559a4..8dbf6bd2b 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -263,9 +263,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors, } else if (auto gmf = dynamic_pointer_cast(f)) { // Case where we have a HybridGaussianFactor with no continuous keys. // In this case, compute discrete probabilities. - auto logProbability = - [&](const std::pair &fv) - -> double { + auto logProbability = [&](const GaussianFactorValuePair &fv) -> double { auto [factor, val] = fv; double v = 0.5 * val * val; if (!factor) return -v; @@ -353,8 +351,7 @@ static std::shared_ptr createHybridGaussianFactor( const KeyVector &continuousSeparator, const DiscreteKeys &discreteSeparator) { // Correct for the normalization constant used up by the conditional - auto correct = - [&](const Result &pair) -> std::pair { + auto correct = [&](const Result &pair) -> GaussianFactorValuePair { const auto &[conditional, factor] = pair; if (factor) { auto hf = std::dynamic_pointer_cast(factor); @@ -365,8 +362,8 @@ static std::shared_ptr createHybridGaussianFactor( } return {factor, 0.0}; }; - DecisionTree> newFactors( - eliminationResults, correct); + DecisionTree newFactors(eliminationResults, + correct); return std::make_shared(continuousSeparator, discreteSeparator, newFactors); diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 6a6055bd8..1bd25a6b1 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -246,7 +246,7 @@ class HybridNonlinearFactor : public HybridFactor { // functional to linearize each factor in the decision 
tree auto linearizeDT = [continuousValues](const std::pair& f) - -> std::pair { + -> GaussianFactorValuePair { auto [factor, val] = f; return {factor->linearize(continuousValues), val}; }; From a7f5173b88c2470cc4543e5d3a50e4930379fd18 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 14 Sep 2024 14:17:40 -0400 Subject: [PATCH 219/398] test TwoStateModel with only differing covariances --- .../hybrid/tests/testHybridGaussianFactor.cpp | 98 ++++++++++++++++++- 1 file changed, 96 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 6e41792aa..ce649e521 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -526,7 +526,7 @@ TEST(HybridGaussianFactor, TwoStateModel) { /** * Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). * - * P(f01|x1,x0,m1) has different means and different covariances. + * P(x1|x0,m1) has different means and different covariances. * * Converting to a factor graph gives us * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) @@ -616,13 +616,107 @@ TEST(HybridGaussianFactor, TwoStateModel2) { } } +/* ************************************************************************* */ +/** + * Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). + * + * P(x1|x0,m1) has the same means but different covariances. + * + * Converting to a factor graph gives us + * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) + * + * If we only have a measurement on z0, then + * the P(m1) should be 0.5/0.5. + * Getting a measurement on z1 gives use more information. 
+ */ +TEST(HybridGaussianFactor, TwoStateModel3) { + using namespace test_two_state_estimation; + + double mu = 1.0; + double sigma0 = 0.5, sigma1 = 2.0; + auto hybridMotionModel = CreateHybridMotionModel(mu, mu, sigma0, sigma1); + + // Start with no measurement on x1, only on x0 + const Vector1 z0(0.5); + VectorValues given; + given.insert(Z(0), z0); + + { + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + + // Check that ratio of Bayes net and factor graph for different modes is + // equal for several values of {x0,x1}. + for (VectorValues vv : + {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + vv.insert(given); // add measurements for HBN + HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); + EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), + gfg.error(hv1) / hbn.error(hv1), 1e-9); + } + + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Importance sampling run with 100k samples gives 50.095/49.905 + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + + // Since no measurement on x1, we a 50/50 probability + auto p_m = bn->at(2)->asDiscrete(); + EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 0}}), 1e-9); + EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 1}}), 1e-9); + } + + { + // Now we add a measurement z1 on x1 + const Vector1 z1(4.0); // favors m==1 + given.insert(Z(1), z1); + + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + + // Check that ratio of Bayes net and factor graph for different modes is + // equal for several values of {x0,x1}. 
+ for (VectorValues vv : + {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + vv.insert(given); // add measurements for HBN + HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); + EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), + gfg.error(hv1) / hbn.error(hv1), 1e-9); + } + + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Values taken from an importance sampling run with 100k samples: + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + DiscreteConditional expected(m1, "51.7762/48.2238"); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); + } + + { + // Add a different measurement z1 on x1 that favors m==1 + const Vector1 z1(7.0); + given.insert_or_assign(Z(1), z1); + + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Values taken from an importance sampling run with 100k samples: + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + DiscreteConditional expected(m1, "49.0762/50.9238"); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.005)); + } +} + /* ************************************************************************* */ /** * Same model, P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1), but now with very informative * measurements and vastly different motion model: either stand still or move * far. This yields a very informative posterior. 
*/ -TEST(HybridGaussianFactor, TwoStateModel3) { +TEST(HybridGaussianFactor, TwoStateModel4) { using namespace test_two_state_estimation; double mu0 = 0.0, mu1 = 10.0; From 9360165ef6ffaf5bd106b15fe2938256fab4e854 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 14 Sep 2024 14:54:36 -0400 Subject: [PATCH 220/398] update HybridGaussianFactor to leverage constant hiding for the Tree of Pairs --- gtsam/hybrid/HybridGaussianFactor.cpp | 90 +++++++++++++++++++++------ gtsam/hybrid/HybridGaussianFactor.h | 24 +++++-- 2 files changed, 91 insertions(+), 23 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index f8d85a253..f38dd6b84 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -28,11 +28,54 @@ namespace gtsam { +/** + * @brief Helper function to augment the [A|b] matrices in the factor components + * with the normalizer values. + * This is done by storing the normalizer value in + * the `b` vector as an additional row. + * + * @param factors DecisionTree of GaussianFactors and arbitrary scalars. + * Gaussian factor in factors. + * @return HybridGaussianFactor::Factors + */ +HybridGaussianFactor::Factors augment( + const HybridGaussianFactor::FactorValuePairs &factors) { + // Find the minimum value so we can "proselytize" to positive values. + // Done because we can't have sqrt of negative numbers. + auto unzipped_pair = unzip(factors); + const HybridGaussianFactor::Factors gaussianFactors = unzipped_pair.first; + const AlgebraicDecisionTree valueTree = unzipped_pair.second; + double min_value = valueTree.min(); + AlgebraicDecisionTree values = + valueTree.apply([&min_value](double n) { return n - min_value; }); + + // Finally, update the [A|b] matrices. 
+ auto update = [&values](const Assignment &assignment, + const HybridGaussianFactor::sharedFactor &gf) { + auto jf = std::dynamic_pointer_cast(gf); + if (!jf) return gf; + // If the log_normalizer is 0, do nothing + if (values(assignment) == 0.0) return gf; + + GaussianFactorGraph gfg; + gfg.push_back(jf); + + Vector c(1); + c << std::sqrt(values(assignment)); + auto constantFactor = std::make_shared(c); + + gfg.push_back(constantFactor); + return std::dynamic_pointer_cast( + std::make_shared(gfg)); + }; + return gaussianFactors.apply(update); +} + /* *******************************************************************************/ HybridGaussianFactor::HybridGaussianFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const Factors &factors) - : Base(continuousKeys, discreteKeys), factors_(factors) {} + const FactorValuePairs &factors) + : Base(continuousKeys, discreteKeys), factors_(augment(factors)) {} /* *******************************************************************************/ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { @@ -45,10 +88,10 @@ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { // Check the base and the factors: return Base::equals(*e, tol) && - factors_.equals(e->factors_, [tol](const GaussianFactorValuePair &f1, - const GaussianFactorValuePair &f2) { - return f1.first->equals(*f2.first, tol) && (f1.second == f2.second); - }); + factors_.equals(e->factors_, + [tol](const sharedFactor &f1, const sharedFactor &f2) { + return f1->equals(*f2, tol); + }); } /* *******************************************************************************/ @@ -63,13 +106,11 @@ void HybridGaussianFactor::print(const std::string &s, } else { factors_.print( "", [&](Key k) { return formatter(k); }, - [&](const GaussianFactorValuePair &gfv) -> std::string { - auto [gf, val] = gfv; + [&](const sharedFactor &gf) -> std::string { RedirectCout rd; std::cout << ":\n"; if (gf) { 
gf->print("", formatter); - std::cout << "value: " << val << std::endl; return rd.str(); } else { return "nullptr"; @@ -80,7 +121,7 @@ void HybridGaussianFactor::print(const std::string &s, } /* *******************************************************************************/ -GaussianFactorValuePair HybridGaussianFactor::operator()( +HybridGaussianFactor::sharedFactor HybridGaussianFactor::operator()( const DiscreteValues &assignment) const { return factors_(assignment); } @@ -101,9 +142,7 @@ GaussianFactorGraphTree HybridGaussianFactor::add( /* *******************************************************************************/ GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree() const { - auto wrap = [](const GaussianFactorValuePair &gfv) { - return GaussianFactorGraph{gfv.first}; - }; + auto wrap = [](const sharedFactor &gf) { return GaussianFactorGraph{gf}; }; return {factors_, wrap}; } @@ -111,9 +150,8 @@ GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree() AlgebraicDecisionTree HybridGaussianFactor::errorTree( const VectorValues &continuousValues) const { // functor to convert from sharedFactor to double error value. 
- auto errorFunc = [&continuousValues](const GaussianFactorValuePair &gfv) { - auto [gf, v] = gfv; - return gf->error(continuousValues) + (0.5 * v * v); + auto errorFunc = [&continuousValues](const sharedFactor &gf) { + return gf->error(continuousValues); }; DecisionTree error_tree(factors_, errorFunc); return error_tree; @@ -121,8 +159,24 @@ AlgebraicDecisionTree HybridGaussianFactor::errorTree( /* *******************************************************************************/ double HybridGaussianFactor::error(const HybridValues &values) const { - auto &&[gf, val] = factors_(values.discrete()); - return gf->error(values.continuous()) + val; + const sharedFactor gf = factors_(values.discrete()); + return gf->error(values.continuous()); +} + +/* *******************************************************************************/ +double ComputeLogNormalizer( + const noiseModel::Gaussian::shared_ptr &noise_model) { + // Since noise models are Gaussian, we can get the logDeterminant using + // the same trick as in GaussianConditional + double logDetR = noise_model->R() + .diagonal() + .unaryExpr([](double x) { return log(x); }) + .sum(); + double logDeterminantSigma = -2.0 * logDetR; + + size_t n = noise_model->dim(); + constexpr double log2pi = 1.8378770664093454835606594728112; + return n * log2pi + logDeterminantSigma; } } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index f8a904539..e40713f46 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -55,8 +55,10 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { using sharedFactor = std::shared_ptr; - /// typedef for Decision Tree of Gaussian factors and log-constant. - using Factors = DecisionTree; + /// typedef for Decision Tree of Gaussian factors and arbitrary value. + using FactorValuePairs = DecisionTree; + /// typedef for Decision Tree of Gaussian factors. 
+ using Factors = DecisionTree; private: /// Decision tree of Gaussian factors indexed by discrete keys. @@ -87,7 +89,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { */ HybridGaussianFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const Factors &factors); + const FactorValuePairs &factors); /** * @brief Construct a new HybridGaussianFactor object using a vector of @@ -102,7 +104,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { const DiscreteKeys &discreteKeys, const std::vector &factors) : HybridGaussianFactor(continuousKeys, discreteKeys, - Factors(discreteKeys, factors)) {} + FactorValuePairs(discreteKeys, factors)) {} /// @} /// @name Testable @@ -118,7 +120,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// @{ /// Get the factor and scalar at a given discrete assignment. - GaussianFactorValuePair operator()(const DiscreteValues &assignment) const; + sharedFactor operator()(const DiscreteValues &assignment) const; /** * @brief Combine the Gaussian Factor Graphs in `sum` and `this` while @@ -173,4 +175,16 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { template <> struct traits : public Testable {}; +/** + * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values + * for a Gaussian noise model. + * We compute this in the log-space for numerical accuracy. + * + * @param noise_model The Gaussian noise model + * whose normalizer we wish to compute. 
+ * @return double + */ +GTSAM_EXPORT double ComputeLogNormalizer( + const noiseModel::Gaussian::shared_ptr &noise_model); + } // namespace gtsam From 0ee9aac434b82e8ba404ef4360e34b9e3706d558 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 14 Sep 2024 14:54:49 -0400 Subject: [PATCH 221/398] update other classes with correct types --- gtsam/hybrid/HybridGaussianConditional.cpp | 2 +- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 15 ++++++--------- 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 29e1434b1..eeeed7d13 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -219,7 +219,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( const DiscreteKeys discreteParentKeys = discreteKeys(); const KeyVector continuousParentKeys = continuousParents(); - const HybridGaussianFactor::Factors likelihoods( + const HybridGaussianFactor::FactorValuePairs likelihoods( conditionals_, [&](const GaussianConditional::shared_ptr &conditional) -> GaussianFactorValuePair { diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 8dbf6bd2b..74091bf95 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -97,9 +97,7 @@ void HybridGaussianFactorGraph::printErrors( std::cout << "nullptr" << "\n"; } else { - auto [factor, val] = hgf->operator()(values.discrete()); - factor->print(ss.str(), keyFormatter); - std::cout << "value: " << val << std::endl; + hgf->operator()(values.discrete())->print(ss.str(), keyFormatter); std::cout << "error = " << factor->error(values) << std::endl; } } else if (auto hc = std::dynamic_pointer_cast(factor)) { @@ -263,11 +261,10 @@ discreteElimination(const HybridGaussianFactorGraph &factors, } else if (auto gmf = dynamic_pointer_cast(f)) { // Case where we have a HybridGaussianFactor 
with no continuous keys. // In this case, compute discrete probabilities. - auto logProbability = [&](const GaussianFactorValuePair &fv) -> double { - auto [factor, val] = fv; - double v = 0.5 * val * val; - if (!factor) return -v; - return -(factor->error(VectorValues()) + v); + auto logProbability = + [&](const GaussianFactor::shared_ptr &factor) -> double { + if (!factor) return 0.0; + return -factor->error(VectorValues()); }; AlgebraicDecisionTree logProbabilities = DecisionTree(gmf->factors(), logProbability); @@ -601,7 +598,7 @@ GaussianFactorGraph HybridGaussianFactorGraph::operator()( } else if (auto gc = std::dynamic_pointer_cast(f)) { gfg.push_back(gf); } else if (auto hgf = std::dynamic_pointer_cast(f)) { - gfg.push_back((*hgf)(assignment).first); + gfg.push_back((*hgf)(assignment)); } else if (auto hgc = dynamic_pointer_cast(f)) { gfg.push_back((*hgc)(assignment)); } else { From d5160f70060fa745d678e557c56fa3a331607522 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 14 Sep 2024 14:57:55 -0400 Subject: [PATCH 222/398] update tests --- gtsam/hybrid/tests/testHybridEstimation.cpp | 3 +-- gtsam/hybrid/tests/testHybridGaussianConditional.cpp | 2 +- gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp | 4 ++-- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index f7e1a6cd6..8f9875a9a 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -544,8 +544,7 @@ std::shared_ptr mixedVarianceFactor( auto func = [&](const Assignment& assignment, - const GaussianFactorValuePair& gfv) -> GaussianFactorValuePair { - auto [gf, val] = gfv; + const GaussianFactor::shared_ptr& gf) -> GaussianFactorValuePair { if (assignment.at(mode) != tight_index) { double factor_log_constant = -0.5 * d * log2pi + log(1.0 / noise_loose); diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp 
b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index cfa7e5a5a..6156fee96 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -183,7 +183,7 @@ TEST(HybridGaussianConditional, Likelihood2) { // Check the detailed JacobianFactor calculation for mode==1. { // We have a JacobianFactor - const auto gf1 = (*likelihood)(assignment1).first; + const auto gf1 = (*likelihood)(assignment1); const auto jf1 = std::dynamic_pointer_cast(gf1); CHECK(jf1); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 69bc0dd58..122f905ff 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -734,8 +734,8 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) { // Expected decision tree with two factor graphs: // f(x0;mode=0)P(x0) and f(x0;mode=1)P(x0) GaussianFactorGraphTree expected{ - M(0), GaussianFactorGraph(std::vector{(*mixture)(d0).first, prior}), - GaussianFactorGraph(std::vector{(*mixture)(d1).first, prior})}; + M(0), GaussianFactorGraph(std::vector{(*mixture)(d0), prior}), + GaussianFactorGraph(std::vector{(*mixture)(d1), prior})}; EXPECT(assert_equal(expected(d0), actual(d0), 1e-5)); EXPECT(assert_equal(expected(d1), actual(d1), 1e-5)); From df7850494cd516c06f0e170eea56b27507a7f350 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 14 Sep 2024 15:07:27 -0400 Subject: [PATCH 223/398] fix testHybridEstimation test --- gtsam/hybrid/tests/testHybridEstimation.cpp | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 8f9875a9a..76e5880ee 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -542,9 +542,8 @@ std::shared_ptr mixedVarianceFactor( double logNormalizationConstant 
= log(1.0 / noise_tight); double logConstant = -0.5 * d * log2pi + logNormalizationConstant; - auto func = - [&](const Assignment& assignment, - const GaussianFactor::shared_ptr& gf) -> GaussianFactorValuePair { + auto func = [&](const Assignment& assignment, + const GaussianFactor::shared_ptr& gf) { if (assignment.at(mode) != tight_index) { double factor_log_constant = -0.5 * d * log2pi + log(1.0 / noise_loose); @@ -556,14 +555,19 @@ std::shared_ptr mixedVarianceFactor( } _gfg.emplace_shared(c); - return {std::make_shared(_gfg), 0.0}; + return std::make_shared(_gfg); } else { - return {dynamic_pointer_cast(gf), 0.0}; + return dynamic_pointer_cast(gf); } }; auto updated_components = gmf->factors().apply(func); + auto updated_pairs = HybridGaussianFactor::FactorValuePairs( + updated_components, + [](const GaussianFactor::shared_ptr& gf) -> GaussianFactorValuePair { + return {gf, 0.0}; + }); auto updated_gmf = std::make_shared( - gmf->continuousKeys(), gmf->discreteKeys(), updated_components); + gmf->continuousKeys(), gmf->discreteKeys(), updated_pairs); return updated_gmf; } From cfe2ad56bfb7ab377231ac4c47a10dd80d65887a Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 14 Sep 2024 15:09:17 -0400 Subject: [PATCH 224/398] fix printing tests --- gtsam/hybrid/tests/testHybridGaussianFactor.cpp | 2 -- gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp | 10 +--------- 2 files changed, 1 insertion(+), 11 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index ce649e521..8837e84a7 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -129,7 +129,6 @@ Hybrid [x1 x2; 1]{ ] b = [ 0 0 ] No noise model -value: 0 1 Leaf : A[x1] = [ @@ -142,7 +141,6 @@ value: 0 ] b = [ 0 0 ] No noise model -value: 0 } )"; diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 
4f1a6fa46..fbf6983d2 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -442,7 +442,7 @@ TEST(HybridFactorGraph, Full_Elimination) { DiscreteFactorGraph discrete_fg; // TODO(Varun) Make this a function of HybridGaussianFactorGraph? - for (auto& factor : (*remainingFactorGraph_partial)) { + for (auto &factor : (*remainingFactorGraph_partial)) { auto df = dynamic_pointer_cast(factor); assert(df); discrete_fg.push_back(df); @@ -526,7 +526,6 @@ Hybrid [x0 x1; m0]{ ] b = [ -1 ] No noise model -value: 0 1 Leaf : A[x0] = [ @@ -537,7 +536,6 @@ value: 0 ] b = [ -0 ] No noise model -value: 0 } factor 2: @@ -553,7 +551,6 @@ Hybrid [x1 x2; m1]{ ] b = [ -1 ] No noise model -value: 0 1 Leaf : A[x1] = [ @@ -564,7 +561,6 @@ value: 0 ] b = [ -0 ] No noise model -value: 0 } factor 3: @@ -613,7 +609,6 @@ Hybrid [x0 x1; m0]{ ] b = [ -1 ] No noise model -value: 0 1 Leaf: A[x0] = [ @@ -624,7 +619,6 @@ value: 0 ] b = [ -0 ] No noise model -value: 0 } factor 2: @@ -639,7 +633,6 @@ Hybrid [x1 x2; m1]{ ] b = [ -1 ] No noise model -value: 0 1 Leaf: A[x1] = [ @@ -650,7 +643,6 @@ value: 0 ] b = [ -0 ] No noise model -value: 0 } factor 3: From 48e087e083842dd3dcbe8fad1a003849e7c4cb47 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 14 Sep 2024 15:14:02 -0400 Subject: [PATCH 225/398] fix error using value for HybridNonlinearFactor --- gtsam/hybrid/HybridGaussianFactor.h | 2 +- gtsam/hybrid/HybridNonlinearFactor.h | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index e40713f46..f52022348 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -119,7 +119,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// @name Standard API /// @{ - /// Get the factor and scalar at a given discrete assignment. + /// Get factor at a given discrete assignment. 
sharedFactor operator()(const DiscreteValues &assignment) const; /** diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 1bd25a6b1..c3f127f4a 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -138,7 +138,7 @@ class HybridNonlinearFactor : public HybridFactor { auto errorFunc = [continuousValues](const std::pair& f) { auto [factor, val] = f; - return factor->error(continuousValues) + val; + return factor->error(continuousValues) + (0.5 * val * val); }; DecisionTree result(factors_, errorFunc); return result; @@ -157,7 +157,7 @@ class HybridNonlinearFactor : public HybridFactor { auto [factor, val] = factors_(discreteValues); // Compute the error for the selected factor const double factorError = factor->error(continuousValues); - return factorError + val; + return factorError + (0.5 * val * val); } /** From 43e6bc6462c71845e085833adba1389857c07936 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 14 Sep 2024 15:15:52 -0400 Subject: [PATCH 226/398] cleaner specific factor linearization --- gtsam/hybrid/HybridNonlinearFactor.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index c3f127f4a..6cfce6bc0 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -236,7 +236,7 @@ class HybridNonlinearFactor : public HybridFactor { GaussianFactor::shared_ptr linearize( const Values& continuousValues, const DiscreteValues& discreteValues) const { - auto [factor, val] = factors_(discreteValues); + auto factor = factors_(discreteValues).first; return factor->linearize(continuousValues); } From a9013aad8e4df92a3d15ff1c01bb559dc2715391 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 15 Sep 2024 08:51:32 -0400 Subject: [PATCH 227/398] HybridNonlinearFactorGraph tests --- .../tests/testHybridNonlinearFactorGraph.cpp | 167 +++++++++++++++++- 1 file 
changed, 166 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index fbf6983d2..16098af7e 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -841,9 +841,174 @@ TEST(HybridFactorGraph, DefaultDecisionTree) { EXPECT_LONGS_EQUAL(1, remainingFactorGraph->size()); } +namespace test_relinearization { +/** + * @brief Create a Factor Graph by directly specifying all + * the factors instead of creating conditionals first. + * This way we can directly provide the likelihoods and + * then perform (re-)linearization. + * + * @param means The means of the GaussianMixtureFactor components. + * @param sigmas The covariances of the GaussianMixtureFactor components. + * @param m1 The discrete key. + * @param x0_measurement A measurement on X0 + * @return HybridGaussianFactorGraph + */ +static HybridNonlinearFactorGraph CreateFactorGraph( + const std::vector &means, const std::vector &sigmas, + DiscreteKey &m1, double x0_measurement) { + auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); + auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + auto f0 = + std::make_shared>(X(0), X(1), means[0], model0); + auto f1 = + std::make_shared>(X(0), X(1), means[1], model1); + + // Create HybridNonlinearFactor + std::vector> factors{ + {f0, ComputeLogNormalizer(model0)}, {f1, ComputeLogNormalizer(model1)}}; + + HybridNonlinearFactor mixtureFactor({X(0), X(1)}, {m1}, factors); + + HybridNonlinearFactorGraph hfg; + hfg.push_back(mixtureFactor); + + hfg.push_back(PriorFactor(X(0), x0_measurement, prior_noise)); + + return hfg; +} +} // namespace test_relinearization + /* ************************************************************************* */ +/** + * @brief Test components with differing means but the same covariances. 
+ * The factor graph is + * *-X1-*-X2 + * | + * M1 + */ +TEST(HybridNonlinearFactorGraph, DifferentMeans) { + using namespace test_relinearization; + + DiscreteKey m1(M(1), 2); + + Values values; + double x0 = 0.0, x1 = 1.75; + values.insert(X(0), x0); + values.insert(X(1), x1); + + std::vector means = {0.0, 2.0}, sigmas = {1e-0, 1e-0}; + + HybridNonlinearFactorGraph hfg = CreateFactorGraph(means, sigmas, m1, x0); + + { + auto bn = hfg.linearize(values)->eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(-1.75)}}, + DiscreteValues{{M(1), 0}}); + + EXPECT(assert_equal(expected, actual)); + + DiscreteValues dv0{{M(1), 0}}; + VectorValues cont0 = bn->optimize(dv0); + double error0 = bn->error(HybridValues(cont0, dv0)); + + // TODO(Varun) Perform importance sampling to estimate error? + + // regression + EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9); + + DiscreteValues dv1{{M(1), 1}}; + VectorValues cont1 = bn->optimize(dv1); + double error1 = bn->error(HybridValues(cont1, dv1)); + EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9); + } + + { + // Add measurement on x1 + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + hfg.push_back(PriorFactor(X(1), means[1], prior_noise)); + + auto bn = hfg.linearize(values)->eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(0.25)}}, + DiscreteValues{{M(1), 1}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9); + } + } +} + +/* 
************************************************************************* */ +/** + * @brief Test components with differing covariances but the same means. + * The factor graph is + * *-X1-*-X2 + * | + * M1 + */ +TEST_DISABLED(HybridNonlinearFactorGraph, DifferentCovariances) { + using namespace test_relinearization; + + DiscreteKey m1(M(1), 2); + + Values values; + double x0 = 1.0, x1 = 1.0; + values.insert(X(0), x0); + values.insert(X(1), x1); + + std::vector means = {0.0, 0.0}, sigmas = {1e2, 1e-2}; + + // Create FG with HybridNonlinearFactor and prior on X1 + HybridNonlinearFactorGraph hfg = CreateFactorGraph(means, sigmas, m1, x0); + // Linearize and eliminate + auto hbn = hfg.linearize(values)->eliminateSequential(); + + VectorValues cv; + cv.insert(X(0), Vector1(0.0)); + cv.insert(X(1), Vector1(0.0)); + + // Check that the error values at the MLE point μ. + AlgebraicDecisionTree errorTree = hbn->errorTree(cv); + + DiscreteValues dv0{{M(1), 0}}; + DiscreteValues dv1{{M(1), 1}}; + + // regression + EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); + EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); + + DiscreteConditional expected_m1(m1, "0.5/0.5"); + DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); + + EXPECT(assert_equal(expected_m1, actual_m1)); +} + +/* ************************************************************************* + */ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } -/* ************************************************************************* */ +/* ************************************************************************* + */ From e2f1ad78a0129420b7bede7e8014ba58fefb308f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 15 Sep 2024 08:55:36 -0400 Subject: [PATCH 228/398] move ComputeLogNormalizer to NoiseModel.h --- gtsam/hybrid/HybridGaussianFactor.cpp | 16 ---------------- gtsam/hybrid/HybridGaussianFactor.h | 12 ------------ gtsam/linear/NoiseModel.cpp | 16 ++++++++++++++++ 
gtsam/linear/NoiseModel.h | 12 ++++++++++++ 4 files changed, 28 insertions(+), 28 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index f38dd6b84..bfebb064e 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -163,20 +163,4 @@ double HybridGaussianFactor::error(const HybridValues &values) const { return gf->error(values.continuous()); } -/* *******************************************************************************/ -double ComputeLogNormalizer( - const noiseModel::Gaussian::shared_ptr &noise_model) { - // Since noise models are Gaussian, we can get the logDeterminant using - // the same trick as in GaussianConditional - double logDetR = noise_model->R() - .diagonal() - .unaryExpr([](double x) { return log(x); }) - .sum(); - double logDeterminantSigma = -2.0 * logDetR; - - size_t n = noise_model->dim(); - constexpr double log2pi = 1.8378770664093454835606594728112; - return n * log2pi + logDeterminantSigma; -} - } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index f52022348..333c5d25b 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -175,16 +175,4 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { template <> struct traits : public Testable {}; -/** - * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values - * for a Gaussian noise model. - * We compute this in the log-space for numerical accuracy. - * - * @param noise_model The Gaussian noise model - * whose normalizer we wish to compute. 
- * @return double - */ -GTSAM_EXPORT double ComputeLogNormalizer( - const noiseModel::Gaussian::shared_ptr &noise_model); - } // namespace gtsam diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index 7ea8e5a54..4936161f2 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -706,6 +706,22 @@ const RobustModel::shared_ptr &robust, const NoiseModel::shared_ptr noise){ return shared_ptr(new Robust(robust,noise)); } +/* *******************************************************************************/ +double ComputeLogNormalizer( + const noiseModel::Gaussian::shared_ptr& noise_model) { + // Since noise models are Gaussian, we can get the logDeterminant using + // the same trick as in GaussianConditional + double logDetR = noise_model->R() + .diagonal() + .unaryExpr([](double x) { return log(x); }) + .sum(); + double logDeterminantSigma = -2.0 * logDetR; + + size_t n = noise_model->dim(); + constexpr double log2pi = 1.8378770664093454835606594728112; + return n * log2pi + logDeterminantSigma; +} + /* ************************************************************************* */ } diff --git a/gtsam/linear/NoiseModel.h b/gtsam/linear/NoiseModel.h index 489f11e4c..831cfd7dd 100644 --- a/gtsam/linear/NoiseModel.h +++ b/gtsam/linear/NoiseModel.h @@ -751,6 +751,18 @@ namespace gtsam { template<> struct traits : public Testable {}; template<> struct traits : public Testable {}; + /** + * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values + * for a Gaussian noise model. + * We compute this in the log-space for numerical accuracy. + * + * @param noise_model The Gaussian noise model + * whose normalizer we wish to compute. 
+ * @return double + */ + GTSAM_EXPORT double ComputeLogNormalizer( + const noiseModel::Gaussian::shared_ptr& noise_model); + } //\ namespace gtsam From 3eb91a4ba06cf0bf3a0d1a4af40f210d64b13bca Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 15 Sep 2024 09:16:31 -0400 Subject: [PATCH 229/398] address some PR comments --- gtsam/hybrid/HybridNonlinearFactor.h | 2 +- gtsam/hybrid/tests/testHybridGaussianFactor.cpp | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 6cfce6bc0..d8e3990f0 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -110,7 +110,7 @@ class HybridNonlinearFactor : public HybridFactor { std::inserter(factor_keys_set, factor_keys_set.end())); if (auto nf = std::dynamic_pointer_cast(f)) { - nonlinear_factors.push_back(std::make_pair(nf, val)); + nonlinear_factors.emplace_back(nf, val); } else { throw std::runtime_error( "Factors passed into HybridNonlinearFactor need to be nonlinear!"); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 8837e84a7..11226e19d 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -616,15 +616,15 @@ TEST(HybridGaussianFactor, TwoStateModel2) { /* ************************************************************************* */ /** - * Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). + * Test a model p(z0|x0)p(x1|x0,m1)p(z1|x1)p(m1). * - * P(x1|x0,m1) has the same means but different covariances. + * p(x1|x0,m1) has the same means but different covariances. * * Converting to a factor graph gives us - * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) + * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)p(m1) * * If we only have a measurement on z0, then - * the P(m1) should be 0.5/0.5. + * the p(m1) should be 0.5/0.5. * Getting a measurement on z1 gives use more information. 
*/ TEST(HybridGaussianFactor, TwoStateModel3) { From c2dc1fcdb2946dec0ba0aff537c2c6694912ef6e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 15 Sep 2024 09:16:51 -0400 Subject: [PATCH 230/398] NonlinearFactorValuePair typedef --- gtsam/hybrid/HybridNonlinearFactor.h | 8 +++++--- gtsam/hybrid/tests/Switching.h | 2 +- gtsam/hybrid/tests/testHybridBayesNet.cpp | 4 ++-- gtsam/hybrid/tests/testHybridEstimation.cpp | 12 ++++++------ gtsam/hybrid/tests/testHybridNonlinearFactor.cpp | 6 ++---- .../hybrid/tests/testHybridNonlinearFactorGraph.cpp | 3 ++- 6 files changed, 18 insertions(+), 17 deletions(-) diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index d8e3990f0..742b4c162 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -33,6 +33,9 @@ namespace gtsam { +/// Alias for a NonlinearFactor shared pointer and double scalar pair. +using NonlinearFactorValuePair = std::pair; + /** * @brief Implementation of a discrete conditional mixture factor. * @@ -55,7 +58,7 @@ class HybridNonlinearFactor : public HybridFactor { * @brief typedef for DecisionTree which has Keys as node labels and * pairs of NonlinearFactor & an arbitrary scalar as leaf nodes. */ - using Factors = DecisionTree>; + using Factors = DecisionTree; private: /// Decision tree of Gaussian factors indexed by discrete keys. 
@@ -100,8 +103,7 @@ class HybridNonlinearFactor : public HybridFactor { const std::vector, double>>& factors, bool normalized = false) : Base(keys, discreteKeys), normalized_(normalized) { - std::vector> - nonlinear_factors; + std::vector nonlinear_factors; KeySet continuous_keys_set(keys.begin(), keys.end()); KeySet factor_keys_set; for (auto&& [f, val] : factors) { diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index f799168f2..9b7c44d02 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -162,7 +162,7 @@ struct Switching { for (size_t k = 0; k < K - 1; k++) { KeyVector keys = {X(k), X(k + 1)}; auto motion_models = motionModels(k, between_sigma); - std::vector> components; + std::vector components; for (auto &&f : motion_models) { components.push_back( {std::dynamic_pointer_cast(f), 0.0}); diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index bf3301d2f..cf4231dba 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -387,8 +387,8 @@ TEST(HybridBayesNet, Sampling) { std::make_shared>(X(0), X(1), 0, noise_model); auto one_motion = std::make_shared>(X(0), X(1), 1, noise_model); - std::vector> factors = { - {zero_motion, 0.0}, {one_motion, 0.0}}; + std::vector factors = {{zero_motion, 0.0}, + {one_motion, 0.0}}; nfg.emplace_shared>(X(0), 0.0, noise_model); nfg.emplace_shared( KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, factors); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 76e5880ee..7204d819d 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -435,8 +435,8 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() { std::make_shared>(X(0), X(1), 0, noise_model); const auto one_motion = std::make_shared>(X(0), X(1), 1, noise_model); - std::vector> 
components = { - {zero_motion, 0.0}, {one_motion, 0.0}}; + std::vector components = {{zero_motion, 0.0}, + {one_motion, 0.0}}; nfg.emplace_shared(KeyVector{X(0), X(1)}, DiscreteKeys{m}, components); @@ -595,8 +595,8 @@ TEST(HybridEstimation, ModeSelection) { model1 = std::make_shared( X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight)); - std::vector> components = { - {model0, 0.0}, {model1, 0.0}}; + std::vector components = {{model0, 0.0}, + {model1, 0.0}}; KeyVector keys = {X(0), X(1)}; HybridNonlinearFactor mf(keys, modes, components); @@ -687,8 +687,8 @@ TEST(HybridEstimation, ModeSelection2) { model1 = std::make_shared>( X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight)); - std::vector> components = { - {model0, 0.0}, {model1, 0.0}}; + std::vector components = {{model0, 0.0}, + {model1, 0.0}}; KeyVector keys = {X(0), X(1)}; HybridNonlinearFactor mf(keys, modes, components); diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp index fa31fbe7d..a4c1b4cc7 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp @@ -58,8 +58,7 @@ TEST(HybridNonlinearFactor, Printing) { std::make_shared>(X(1), X(2), between0, model); auto f1 = std::make_shared>(X(1), X(2), between1, model); - std::vector> factors{ - {f0, 0.0}, {f1, 0.0}}; + std::vector factors{{f0, 0.0}, {f1, 0.0}}; HybridNonlinearFactor mixtureFactor({X(1), X(2)}, {m1}, factors); @@ -87,8 +86,7 @@ static HybridNonlinearFactor getHybridNonlinearFactor() { std::make_shared>(X(1), X(2), between0, model); auto f1 = std::make_shared>(X(1), X(2), between1, model); - std::vector> factors{ - {f0, 0.0}, {f1, 0.0}}; + std::vector factors{{f0, 0.0}, {f1, 0.0}}; return HybridNonlinearFactor({X(1), X(2)}, {m1}, factors); } diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 16098af7e..00d0d3a39 100644 --- 
a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -27,6 +27,7 @@ #include #include #include +#include #include #include #include @@ -867,7 +868,7 @@ static HybridNonlinearFactorGraph CreateFactorGraph( std::make_shared>(X(0), X(1), means[1], model1); // Create HybridNonlinearFactor - std::vector> factors{ + std::vector factors{ {f0, ComputeLogNormalizer(model0)}, {f1, ComputeLogNormalizer(model1)}}; HybridNonlinearFactor mixtureFactor({X(0), X(1)}, {m1}, factors); From ccea10410debc654bb49fd3d5f09cc3a788bb28b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 15 Sep 2024 09:17:05 -0400 Subject: [PATCH 231/398] put ComputeLogNormalizer in the correct namespace --- gtsam/linear/NoiseModel.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index 4936161f2..29222a106 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -706,6 +706,9 @@ const RobustModel::shared_ptr &robust, const NoiseModel::shared_ptr noise){ return shared_ptr(new Robust(robust,noise)); } +/* ************************************************************************* */ +} // namespace noiseModel + /* *******************************************************************************/ double ComputeLogNormalizer( const noiseModel::Gaussian::shared_ptr& noise_model) { @@ -722,7 +725,4 @@ double ComputeLogNormalizer( return n * log2pi + logDeterminantSigma; } -/* ************************************************************************* */ - -} } // gtsam From 6b9fb5b22f4c4aee2113840d67a7a1b71990f764 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 15 Sep 2024 09:17:51 -0400 Subject: [PATCH 232/398] fix python wrapper --- gtsam/hybrid/hybrid.i | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 686ce60ac..dcf0fce04 100644 --- 
a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -77,7 +77,8 @@ class HybridGaussianFactor : gtsam::HybridFactor { HybridGaussianFactor( const gtsam::KeyVector& continuousKeys, const gtsam::DiscreteKeys& discreteKeys, - const std::vector& factorsList); + const std::vector>& + factorsList); void print(string s = "HybridGaussianFactor\n", const gtsam::KeyFormatter& keyFormatter = @@ -242,14 +243,14 @@ class HybridNonlinearFactorGraph { class HybridNonlinearFactor : gtsam::HybridFactor { HybridNonlinearFactor( const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, - const gtsam::DecisionTree& factors, + const gtsam::DecisionTree< + gtsam::Key, std::pair>& factors, bool normalized = false); - template - HybridNonlinearFactor(const gtsam::KeyVector& keys, - const gtsam::DiscreteKeys& discreteKeys, - const std::vector& factors, - bool normalized = false); + HybridNonlinearFactor( + const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, + const std::vector>& factors, + bool normalized = false); double error(const gtsam::Values& continuousValues, const gtsam::DiscreteValues& discreteValues) const; From 1b7435361e85450a4d4339b7b9869cc74cdc2fad Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 15 Sep 2024 09:30:59 -0400 Subject: [PATCH 233/398] leverage hiding inside HybridGaussianFactor --- gtsam/hybrid/HybridGaussianConditional.cpp | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index eeeed7d13..2b2062e6f 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -229,15 +229,10 @@ std::shared_ptr HybridGaussianConditional::likelihood( if (Cgm_Kgcm == 0.0) { return {likelihood_m, 0.0}; } else { - // Add a constant factor to the likelihood in case the noise models + // Add a constant to the likelihood in case the noise models // are not all equal. 
- GaussianFactorGraph gfg; - gfg.push_back(likelihood_m); - Vector c(1); - c << std::sqrt(2.0 * Cgm_Kgcm); - auto constantFactor = std::make_shared(c); - gfg.push_back(constantFactor); - return {std::make_shared(gfg), 0.0}; + double c = 2.0 * Cgm_Kgcm; + return {likelihood_m, c}; } }); return std::make_shared( From 1c74da26f455a3f12b692eecfc5142ab463449a5 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 15 Sep 2024 09:46:33 -0400 Subject: [PATCH 234/398] fix python tests --- python/gtsam/tests/test_HybridFactorGraph.py | 4 ++-- .../gtsam/tests/test_HybridNonlinearFactorGraph.py | 13 ++++++------- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index f82665c49..adbda59ce 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -37,7 +37,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model) jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model) - gmf = HybridGaussianFactor([X(0)], dk, [jf1, jf2]) + gmf = HybridGaussianFactor([X(0)], dk, [(jf1, 0), (jf2, 0)]) hfg = HybridGaussianFactorGraph() hfg.push_back(jf1) @@ -64,7 +64,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model) jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model) - gmf = HybridGaussianFactor([X(0)], dk, [jf1, jf2]) + gmf = HybridGaussianFactor([X(0)], dk, [(jf1, 0), (jf2, 0)]) hfg = HybridGaussianFactorGraph() hfg.push_back(jf1) diff --git a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py index 3a659c4e8..247f83c19 100644 --- a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py +++ b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py @@ -35,13 +35,12 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): nlfg.push_back( 
PriorFactorPoint3(2, Point3(1, 2, 3), noiseModel.Diagonal.Variances([0.5, 0.5, 0.5]))) - nlfg.push_back( - gtsam.HybridNonlinearFactor([1], dk, [ - PriorFactorPoint3(1, Point3(0, 0, 0), - noiseModel.Unit.Create(3)), - PriorFactorPoint3(1, Point3(1, 2, 1), - noiseModel.Unit.Create(3)) - ])) + + factors = [(PriorFactorPoint3(1, Point3(0, 0, 0), + noiseModel.Unit.Create(3)), 0.0), + (PriorFactorPoint3(1, Point3(1, 2, 1), + noiseModel.Unit.Create(3)), 0.0)] + nlfg.push_back(gtsam.HybridNonlinearFactor([1], dk, factors)) nlfg.push_back(gtsam.DecisionTreeFactor((10, 2), "1 3")) values = gtsam.Values() values.insert_point3(1, Point3(0, 0, 0)) From 336b4947ad6c3dbf59ad91366b7275cb60949ecc Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 15 Sep 2024 15:36:22 -0400 Subject: [PATCH 235/398] fixes --- gtsam/hybrid/HybridGaussianConditional.cpp | 20 +++++++++++++++++++- gtsam/hybrid/HybridGaussianFactor.cpp | 16 ---------------- gtsam/hybrid/HybridGaussianFactor.h | 12 ------------ 3 files changed, 19 insertions(+), 29 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index bb27d3971..efa771c12 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -224,7 +224,24 @@ std::shared_ptr HybridGaussianConditional::likelihood( [&](const GaussianConditional::shared_ptr &conditional) -> GaussianFactorValuePair { const auto likelihood_m = conditional->likelihood(given); - s.insert(discreteKeys.begin(), discreteKeys.end()); + const double Cgm_Kgcm = + logConstant_ - conditional->logNormalizationConstant(); + if (Cgm_Kgcm == 0.0) { + return {likelihood_m, 0.0}; + } else { + // Add a constant to the likelihood in case the noise models + // are not all equal. 
+ double c = 2.0 * Cgm_Kgcm; + return {likelihood_m, c}; + } + }); + return std::make_shared( + continuousParentKeys, discreteParentKeys, likelihoods); +} + +/* ************************************************************************* */ +std::set DiscreteKeysAsSet(const DiscreteKeys &discreteKeys) { + std::set s(discreteKeys.begin(), discreteKeys.end()); return s; } @@ -237,6 +254,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( * const Assignment &, const GaussianConditional::shared_ptr &)> */ std::function &, const GaussianConditional::shared_ptr &)> HybridGaussianConditional::prunerFunc(const DecisionTreeFactor &discreteProbs) { // Get the discrete keys as sets for the decision tree // and the gaussian mixture. diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index f38dd6b84..bfebb064e 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -163,20 +163,4 @@ double HybridGaussianFactor::error(const HybridValues &values) const { return gf->error(values.continuous()); } -/* *******************************************************************************/ -double ComputeLogNormalizer( - const noiseModel::Gaussian::shared_ptr &noise_model) { - // Since noise models are Gaussian, we can get the logDeterminant using - // the same trick as in GaussianConditional - double logDetR = noise_model->R() - .diagonal() - .unaryExpr([](double x) { return log(x); }) - .sum(); - double logDeterminantSigma = -2.0 * logDetR; - - size_t n = noise_model->dim(); - constexpr double log2pi = 1.8378770664093454835606594728112; - return n * log2pi + logDeterminantSigma; -} - } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index f52022348..333c5d25b 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -175,16 +175,4 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { template <> 
struct traits : public Testable {}; -/** - * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values - * for a Gaussian noise model. - * We compute this in the log-space for numerical accuracy. - * - * @param noise_model The Gaussian noise model - * whose normalizer we wish to compute. - * @return double - */ -GTSAM_EXPORT double ComputeLogNormalizer( - const noiseModel::Gaussian::shared_ptr &noise_model); - } // namespace gtsam From de68aecca5e894c3ddedc8f3e691cfc44c70cc1e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 15 Sep 2024 15:45:14 -0400 Subject: [PATCH 236/398] fix tests --- .../hybrid/tests/testHybridGaussianFactor.cpp | 52 +++++++++---------- 1 file changed, 25 insertions(+), 27 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 44e60d451..f2248510e 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -221,8 +221,9 @@ double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, return 1 / (1 + e); }; -static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, - double sigma0, double sigma1) { +static HybridBayesNet GetHybridGaussianConditionalModel(double mu0, double mu1, + double sigma0, + double sigma1) { DiscreteKey m(M(0), 2); Key z = Z(0); @@ -254,7 +255,7 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, * The resulting factor graph should eliminate to a Bayes net * which represents a sigmoid function. */ -TEST(HybridGaussianFactor, GaussianMixtureModel) { +TEST(HybridGaussianFactor, HybridGaussianConditionalModel) { using namespace test_gmm; double mu0 = 1.0, mu1 = 3.0; @@ -263,7 +264,7 @@ TEST(HybridGaussianFactor, GaussianMixtureModel) { DiscreteKey m(M(0), 2); Key z = Z(0); - auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma, sigma); + auto hbn = GetHybridGaussianConditionalModel(mu0, mu1, sigma, sigma); // The result should be a sigmoid. 
// So should be P(m=1|z) = 0.5 at z=3.0 - 1.0=2.0 @@ -326,7 +327,7 @@ TEST(HybridGaussianFactor, GaussianMixtureModel) { * which represents a Gaussian-like function * where m1>m0 close to 3.1333. */ -TEST(HybridGaussianFactor, GaussianMixtureModel2) { +TEST(HybridGaussianFactor, HybridGaussianConditionalModel2) { using namespace test_gmm; double mu0 = 1.0, mu1 = 3.0; @@ -335,7 +336,7 @@ TEST(HybridGaussianFactor, GaussianMixtureModel2) { DiscreteKey m(M(0), 2); Key z = Z(0); - auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma0, sigma1); + auto hbn = GetHybridGaussianConditionalModel(mu0, mu1, sigma0, sigma1); double m1_high = 3.133, lambda = 4; { @@ -393,7 +394,7 @@ namespace test_two_state_estimation { DiscreteKey m1(M(1), 2); -void addMeasurement(HybridBayesNet& hbn, Key z_key, Key x_key, double sigma) { +void addMeasurement(HybridBayesNet &hbn, Key z_key, Key x_key, double sigma) { auto measurement_model = noiseModel::Isotropic::Sigma(1, sigma); hbn.emplace_shared(z_key, Vector1(0.0), I_1x1, x_key, -I_1x1, measurement_model); @@ -414,7 +415,7 @@ static HybridGaussianConditional::shared_ptr CreateHybridMotionModel( /// Create two state Bayes network with 1 or two measurement models HybridBayesNet CreateBayesNet( - const HybridGaussianConditional::shared_ptr& hybridMotionModel, + const HybridGaussianConditional::shared_ptr &hybridMotionModel, bool add_second_measurement = false) { HybridBayesNet hbn; @@ -437,9 +438,9 @@ HybridBayesNet CreateBayesNet( /// Approximate the discrete marginal P(m1) using importance sampling std::pair approximateDiscreteMarginal( - const HybridBayesNet& hbn, - const HybridGaussianConditional::shared_ptr& hybridMotionModel, - const VectorValues& given, size_t N = 100000) { + const HybridBayesNet &hbn, + const HybridGaussianConditional::shared_ptr &hybridMotionModel, + const VectorValues &given, size_t N = 100000) { /// Create importance sampling network q(x0,x1,m) = p(x1|x0,m1) q(x0) P(m1), /// using q(x0) = N(z0, sigmaQ) to sample x0. 
HybridBayesNet q; @@ -758,7 +759,7 @@ static HybridGaussianFactorGraph GetFactorGraphFromBayesNet( auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - // GaussianMixtureFactor component factors + // HybridGaussianFactor component factors auto f0 = std::make_shared>(X(0), X(1), means[0], model0); auto f1 = @@ -783,8 +784,8 @@ static HybridGaussianFactorGraph GetFactorGraphFromBayesNet( std::make_shared(terms0, 1, -d0, model0), std::make_shared(terms1, 1, -d1, model1)}; gtsam::HybridBayesNet bn; - bn.emplace_shared(KeyVector{Z(1)}, KeyVector{X(0), X(1)}, - DiscreteKeys{m1}, conditionals); + bn.emplace_shared( + KeyVector{Z(1)}, KeyVector{X(0), X(1)}, DiscreteKeys{m1}, conditionals); // Create FG via toFactorGraph gtsam::VectorValues measurements; @@ -812,7 +813,7 @@ static HybridGaussianFactorGraph GetFactorGraphFromBayesNet( * P(Z1 | X1, X2, M1) has 2 conditionals each for the binary * mode m1. */ -TEST(GaussianMixtureFactor, FactorGraphFromBayesNet) { +TEST(HybridGaussianFactor, FactorGraphFromBayesNet) { DiscreteKey m1(M(1), 2); Values values; @@ -889,8 +890,8 @@ namespace test_direct_factor_graph { * then perform linearization. * * @param values Initial values to linearize around. - * @param means The means of the GaussianMixtureFactor components. - * @param sigmas The covariances of the GaussianMixtureFactor components. + * @param means The means of the HybridGaussianFactor components. + * @param sigmas The covariances of the HybridGaussianFactor components. * @param m1 The discrete key. 
* @return HybridGaussianFactorGraph */ @@ -908,13 +909,10 @@ static HybridGaussianFactorGraph CreateFactorGraph( std::make_shared>(X(0), X(1), means[1], model1) ->linearize(values); - // Create GaussianMixtureFactor - std::vector factors{f0, f1}; - AlgebraicDecisionTree logNormalizers( - {m1}, std::vector{ComputeLogNormalizer(model0), - ComputeLogNormalizer(model1)}); - GaussianMixtureFactor mixtureFactor({X(0), X(1)}, {m1}, factors, - logNormalizers); + // Create HybridGaussianFactor + std::vector factors{ + {f0, ComputeLogNormalizer(model0)}, {f1, ComputeLogNormalizer(model1)}}; + HybridGaussianFactor mixtureFactor({X(0), X(1)}, {m1}, factors); HybridGaussianFactorGraph hfg; hfg.push_back(mixtureFactor); @@ -934,7 +932,7 @@ static HybridGaussianFactorGraph CreateFactorGraph( * | * M1 */ -TEST(GaussianMixtureFactor, DifferentMeansFG) { +TEST(HybridGaussianFactor, DifferentMeansFG) { using namespace test_direct_factor_graph; DiscreteKey m1(M(1), 2); @@ -1009,7 +1007,7 @@ TEST(GaussianMixtureFactor, DifferentMeansFG) { * | * M1 */ -TEST(GaussianMixtureFactor, DifferentCovariancesFG) { +TEST(HybridGaussianFactor, DifferentCovariancesFG) { using namespace test_direct_factor_graph; DiscreteKey m1(M(1), 2); @@ -1021,7 +1019,7 @@ TEST(GaussianMixtureFactor, DifferentCovariancesFG) { std::vector means = {0.0, 0.0}, sigmas = {1e2, 1e-2}; - // Create FG with GaussianMixtureFactor and prior on X1 + // Create FG with HybridGaussianFactor and prior on X1 HybridGaussianFactorGraph mixture_fg = CreateFactorGraph(values, means, sigmas, m1); From 3a7a0b84fe73a1c20845b84a78dd7d9984a3fc81 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 15 Sep 2024 17:26:33 -0400 Subject: [PATCH 237/398] use enum to categorize HybridFactor --- gtsam/hybrid/HybridFactor.cpp | 39 ++++++++++++++++------- gtsam/hybrid/HybridFactor.h | 18 +++++------ gtsam/hybrid/tests/testHybridBayesNet.cpp | 10 +++--- 3 files changed, 42 insertions(+), 25 deletions(-) diff --git a/gtsam/hybrid/HybridFactor.cpp 
b/gtsam/hybrid/HybridFactor.cpp index b25e97f05..89b1943cd 100644 --- a/gtsam/hybrid/HybridFactor.cpp +++ b/gtsam/hybrid/HybridFactor.cpp @@ -50,31 +50,37 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1, /* ************************************************************************ */ HybridFactor::HybridFactor(const KeyVector &keys) - : Base(keys), isContinuous_(true), continuousKeys_(keys) {} + : Base(keys), + category_(HybridCategory::Continuous), + continuousKeys_(keys) {} /* ************************************************************************ */ HybridFactor::HybridFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys) : Base(CollectKeys(continuousKeys, discreteKeys)), - isDiscrete_((continuousKeys.size() == 0) && (discreteKeys.size() != 0)), - isContinuous_((continuousKeys.size() != 0) && (discreteKeys.size() == 0)), - isHybrid_((continuousKeys.size() != 0) && (discreteKeys.size() != 0)), discreteKeys_(discreteKeys), - continuousKeys_(continuousKeys) {} + continuousKeys_(continuousKeys) { + if ((continuousKeys.size() == 0) && (discreteKeys.size() != 0)) { + category_ = HybridCategory::Discrete; + } else if ((continuousKeys.size() != 0) && (discreteKeys.size() == 0)) { + category_ = HybridCategory::Continuous; + } else { + category_ = HybridCategory::Hybrid; + } +} /* ************************************************************************ */ HybridFactor::HybridFactor(const DiscreteKeys &discreteKeys) : Base(CollectKeys({}, discreteKeys)), - isDiscrete_(true), + category_(HybridCategory::Discrete), discreteKeys_(discreteKeys), continuousKeys_({}) {} /* ************************************************************************ */ bool HybridFactor::equals(const HybridFactor &lf, double tol) const { const This *e = dynamic_cast(&lf); - return e != nullptr && Base::equals(*e, tol) && - isDiscrete_ == e->isDiscrete_ && isContinuous_ == e->isContinuous_ && - isHybrid_ == e->isHybrid_ && continuousKeys_ == e->continuousKeys_ && + 
return e != nullptr && Base::equals(*e, tol) && category_ == e->category_ && + continuousKeys_ == e->continuousKeys_ && discreteKeys_ == e->discreteKeys_; } @@ -82,9 +88,18 @@ bool HybridFactor::equals(const HybridFactor &lf, double tol) const { void HybridFactor::print(const std::string &s, const KeyFormatter &formatter) const { std::cout << (s.empty() ? "" : s + "\n"); - if (isContinuous_) std::cout << "Continuous "; - if (isDiscrete_) std::cout << "Discrete "; - if (isHybrid_) std::cout << "Hybrid "; + switch (category_) { + case HybridCategory::Continuous: + std::cout << "Continuous "; + break; + case HybridCategory::Discrete: + std::cout << "Discrete "; + break; + case HybridCategory::Hybrid: + std::cout << "Hybrid "; + break; + } + std::cout << "["; for (size_t c = 0; c < continuousKeys_.size(); c++) { std::cout << formatter(continuousKeys_.at(c)); diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index c66116512..2cc7453f4 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -41,6 +41,9 @@ KeyVector CollectKeys(const KeyVector &keys1, const KeyVector &keys2); DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1, const DiscreteKeys &key2); +/// Enum to help with categorizing hybrid factors. +enum class HybridCategory { Discrete, Continuous, Hybrid }; + /** * Base class for *truly* hybrid probabilistic factors * @@ -53,9 +56,8 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1, */ class GTSAM_EXPORT HybridFactor : public Factor { private: - bool isDiscrete_ = false; - bool isContinuous_ = false; - bool isHybrid_ = false; + /// Record what category of HybridFactor this is. + HybridCategory category_; protected: // Set of DiscreteKeys for this factor. @@ -116,13 +118,13 @@ class GTSAM_EXPORT HybridFactor : public Factor { /// @{ /// True if this is a factor of discrete variables only. 
- bool isDiscrete() const { return isDiscrete_; } + bool isDiscrete() const { return category_ == HybridCategory::Discrete; } /// True if this is a factor of continuous variables only. - bool isContinuous() const { return isContinuous_; } + bool isContinuous() const { return category_ == HybridCategory::Continuous; } /// True is this is a Discrete-Continuous factor. - bool isHybrid() const { return isHybrid_; } + bool isHybrid() const { return category_ == HybridCategory::Hybrid; } /// Return the number of continuous variables in this factor. size_t nrContinuous() const { return continuousKeys_.size(); } @@ -142,9 +144,7 @@ class GTSAM_EXPORT HybridFactor : public Factor { template void serialize(ARCHIVE &ar, const unsigned int /*version*/) { ar &BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); - ar &BOOST_SERIALIZATION_NVP(isDiscrete_); - ar &BOOST_SERIALIZATION_NVP(isContinuous_); - ar &BOOST_SERIALIZATION_NVP(isHybrid_); + ar &BOOST_SERIALIZATION_NVP(category_); ar &BOOST_SERIALIZATION_NVP(discreteKeys_); ar &BOOST_SERIALIZATION_NVP(continuousKeys_); } diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index cf4231dba..99487a84a 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -387,11 +387,13 @@ TEST(HybridBayesNet, Sampling) { std::make_shared>(X(0), X(1), 0, noise_model); auto one_motion = std::make_shared>(X(0), X(1), 1, noise_model); - std::vector factors = {{zero_motion, 0.0}, - {one_motion, 0.0}}; + + DiscreteKeys discreteKeys{DiscreteKey(M(0), 2)}; + HybridNonlinearFactor::Factors factors( + discreteKeys, {{zero_motion, 0.0}, {one_motion, 0.0}}); nfg.emplace_shared>(X(0), 0.0, noise_model); - nfg.emplace_shared( - KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, factors); + nfg.emplace_shared(KeyVector{X(0), X(1)}, discreteKeys, + factors); DiscreteKey mode(M(0), 2); nfg.emplace_shared(mode, "1/1"); From 97eb6bc8b9cb758cb782c1c1eb5011f9845a33f6 Mon 
Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 16 Sep 2024 13:28:29 -0400 Subject: [PATCH 238/398] renaming --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 74091bf95..362150745 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -114,10 +114,11 @@ void HybridGaussianFactorGraph::printErrors( << "\n"; } else { // Is hybrid - auto mixtureComponent = + auto conditionalComponent = hc->asMixture()->operator()(values.discrete()); - mixtureComponent->print(ss.str(), keyFormatter); - std::cout << "error = " << mixtureComponent->error(values) << "\n"; + conditionalComponent->print(ss.str(), keyFormatter); + std::cout << "error = " << conditionalComponent->error(values) + << "\n"; } } } else if (auto gf = std::dynamic_pointer_cast(factor)) { @@ -411,10 +412,10 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // Create the HybridGaussianConditional from the conditionals HybridGaussianConditional::Conditionals conditionals( eliminationResults, [](const Result &pair) { return pair.first; }); - auto gaussianMixture = std::make_shared( + auto hybridGaussian = std::make_shared( frontalKeys, continuousSeparator, discreteSeparator, conditionals); - return {std::make_shared(gaussianMixture), newFactor}; + return {std::make_shared(hybridGaussian), newFactor}; } /* ************************************************************************ @@ -465,7 +466,7 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, // Now we will need to know how to retrieve the corresponding continuous // densities for the assignment (c1,c2,c3) (OR (c2,c3,c1), note there is NO // defined order!). We also need to consider when there is pruning. 
Two - // mixture factors could have different pruning patterns - one could have + // hybrid factors could have different pruning patterns - one could have // (c1=0,c2=1) pruned, and another could have (c2=0,c3=1) pruned, and this // creates a big problem in how to identify the intersection of non-pruned // branches. From 4302ee33c96d5179f00394903869f80409246713 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 16 Sep 2024 13:30:07 -0400 Subject: [PATCH 239/398] make None the default HybridCategory --- gtsam/hybrid/HybridFactor.cpp | 29 ++++++++++++++++++++--------- gtsam/hybrid/HybridFactor.h | 4 ++-- 2 files changed, 22 insertions(+), 11 deletions(-) diff --git a/gtsam/hybrid/HybridFactor.cpp b/gtsam/hybrid/HybridFactor.cpp index 89b1943cd..5582166a3 100644 --- a/gtsam/hybrid/HybridFactor.cpp +++ b/gtsam/hybrid/HybridFactor.cpp @@ -54,20 +54,28 @@ HybridFactor::HybridFactor(const KeyVector &keys) category_(HybridCategory::Continuous), continuousKeys_(keys) {} +/* ************************************************************************ */ +HybridCategory GetCategory(const KeyVector &continuousKeys, + const DiscreteKeys &discreteKeys) { + if ((continuousKeys.size() == 0) && (discreteKeys.size() != 0)) { + return HybridCategory::Discrete; + } else if ((continuousKeys.size() != 0) && (discreteKeys.size() == 0)) { + return HybridCategory::Continuous; + } else if ((continuousKeys.size() != 0) && (discreteKeys.size() != 0)) { + return HybridCategory::Hybrid; + } else { + // Case where we have no keys. Should never happen. 
+ return HybridCategory::None; + } +} + /* ************************************************************************ */ HybridFactor::HybridFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys) : Base(CollectKeys(continuousKeys, discreteKeys)), + category_(GetCategory(continuousKeys, discreteKeys)), discreteKeys_(discreteKeys), - continuousKeys_(continuousKeys) { - if ((continuousKeys.size() == 0) && (discreteKeys.size() != 0)) { - category_ = HybridCategory::Discrete; - } else if ((continuousKeys.size() != 0) && (discreteKeys.size() == 0)) { - category_ = HybridCategory::Continuous; - } else { - category_ = HybridCategory::Hybrid; - } -} + continuousKeys_(continuousKeys) {} /* ************************************************************************ */ HybridFactor::HybridFactor(const DiscreteKeys &discreteKeys) @@ -98,6 +106,9 @@ void HybridFactor::print(const std::string &s, case HybridCategory::Hybrid: std::cout << "Hybrid "; break; + case HybridCategory::None: + std::cout << "None "; + break; } std::cout << "["; diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index 2cc7453f4..d0b9bbabe 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -42,7 +42,7 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1, const DiscreteKeys &key2); /// Enum to help with categorizing hybrid factors. -enum class HybridCategory { Discrete, Continuous, Hybrid }; +enum class HybridCategory { None, Discrete, Continuous, Hybrid }; /** * Base class for *truly* hybrid probabilistic factors @@ -57,7 +57,7 @@ enum class HybridCategory { Discrete, Continuous, Hybrid }; class GTSAM_EXPORT HybridFactor : public Factor { private: /// Record what category of HybridFactor this is. - HybridCategory category_; + HybridCategory category_ = HybridCategory::None; protected: // Set of DiscreteKeys for this factor. 
From 8cb95d5b5a7aad89fbad616cefb69d7f90b7a1e6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 16 Sep 2024 13:31:03 -0400 Subject: [PATCH 240/398] remove redundancy from HybridConditional constructors --- gtsam/hybrid/HybridConditional.cpp | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index 8a8511aef..ed8125c2b 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -28,14 +28,9 @@ HybridConditional::HybridConditional(const KeyVector &continuousFrontals, const DiscreteKeys &discreteFrontals, const KeyVector &continuousParents, const DiscreteKeys &discreteParents) - : HybridConditional( - CollectKeys( - {continuousFrontals.begin(), continuousFrontals.end()}, - KeyVector{continuousParents.begin(), continuousParents.end()}), - CollectDiscreteKeys( - {discreteFrontals.begin(), discreteFrontals.end()}, - {discreteParents.begin(), discreteParents.end()}), - continuousFrontals.size() + discreteFrontals.size()) {} + : HybridConditional(CollectKeys(continuousFrontals, continuousParents), + CollectDiscreteKeys(discreteFrontals, discreteParents), + continuousFrontals.size() + discreteFrontals.size()) {} /* ************************************************************************ */ HybridConditional::HybridConditional( @@ -56,9 +51,7 @@ HybridConditional::HybridConditional( /* ************************************************************************ */ HybridConditional::HybridConditional( const std::shared_ptr &gaussianMixture) - : BaseFactor(KeyVector(gaussianMixture->keys().begin(), - gaussianMixture->keys().begin() + - gaussianMixture->nrContinuous()), + : BaseFactor(gaussianMixture->continuousKeys(), gaussianMixture->discreteKeys()), BaseConditional(gaussianMixture->nrFrontals()) { inner_ = gaussianMixture; From 64d9fc67bd41aa07eb17c807f03a5e6da9b93c3b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 16 Sep 2024 
17:30:08 -0400 Subject: [PATCH 241/398] tree based constructors --- gtsam/hybrid/tests/Switching.h | 19 ++++++++++--------- gtsam/hybrid/tests/testHybridBayesNet.cpp | 12 +++++++----- 2 files changed, 17 insertions(+), 14 deletions(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 9b7c44d02..4be3cff01 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -57,15 +57,16 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( // keyFunc(1) to keyFunc(n+1) for (size_t t = 1; t < n; t++) { - std::vector components = { - {std::make_shared(keyFunc(t), I_3x3, keyFunc(t + 1), - I_3x3, Z_3x1), - 0.0}, - {std::make_shared(keyFunc(t), I_3x3, keyFunc(t + 1), - I_3x3, Vector3::Ones()), - 0.0}}; - hfg.add(HybridGaussianFactor({keyFunc(t), keyFunc(t + 1)}, - {{dKeyFunc(t), 2}}, components)); + DiscreteKeys dKeys{{dKeyFunc(t), 2}}; + HybridGaussianFactor::FactorValuePairs components( + dKeys, {{std::make_shared(keyFunc(t), I_3x3, + keyFunc(t + 1), I_3x3, Z_3x1), + 0.0}, + {std::make_shared( + keyFunc(t), I_3x3, keyFunc(t + 1), I_3x3, Vector3::Ones()), + 0.0}}); + hfg.add( + HybridGaussianFactor({keyFunc(t), keyFunc(t + 1)}, dKeys, components)); if (t > 1) { hfg.add(DecisionTreeFactor({{dKeyFunc(t - 1), 2}, {dKeyFunc(t), 2}}, diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index cf4231dba..8b48c14b6 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -383,15 +383,17 @@ TEST(HybridBayesNet, Sampling) { HybridNonlinearFactorGraph nfg; auto noise_model = noiseModel::Diagonal::Sigmas(Vector1(1.0)); + nfg.emplace_shared>(X(0), 0.0, noise_model); + auto zero_motion = std::make_shared>(X(0), X(1), 0, noise_model); auto one_motion = std::make_shared>(X(0), X(1), 1, noise_model); - std::vector factors = {{zero_motion, 0.0}, - {one_motion, 0.0}}; - nfg.emplace_shared>(X(0), 0.0, noise_model); - 
nfg.emplace_shared( - KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, factors); + DiscreteKeys discreteKeys{DiscreteKey(M(0), 2)}; + HybridNonlinearFactor::Factors factors( + discreteKeys, {{zero_motion, 0.0}, {one_motion, 0.0}}); + nfg.emplace_shared(KeyVector{X(0), X(1)}, discreteKeys, + factors); DiscreteKey mode(M(0), 2); nfg.emplace_shared(mode, "1/1"); From 094db1eb799dcb913675342699fdb3802b095d92 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 16 Sep 2024 18:39:40 -0400 Subject: [PATCH 242/398] correct documentation and test for ComputeLogNormalizer --- gtsam/linear/NoiseModel.cpp | 3 +++ gtsam/linear/NoiseModel.h | 2 +- gtsam/linear/tests/testNoiseModel.cpp | 20 ++++++++++++++++++++ 3 files changed, 24 insertions(+), 1 deletion(-) diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index 29222a106..a586d119b 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -714,6 +714,9 @@ double ComputeLogNormalizer( const noiseModel::Gaussian::shared_ptr& noise_model) { // Since noise models are Gaussian, we can get the logDeterminant using // the same trick as in GaussianConditional + // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1} + // log det(Sigma) = -log(det(R'R)) = -2*log(det(R)) + // Hence, log det(Sigma)) = -2.0 * logDetR() double logDetR = noise_model->R() .diagonal() .unaryExpr([](double x) { return log(x); }) diff --git a/gtsam/linear/NoiseModel.h b/gtsam/linear/NoiseModel.h index 831cfd7dd..ffc1a3ebc 100644 --- a/gtsam/linear/NoiseModel.h +++ b/gtsam/linear/NoiseModel.h @@ -752,7 +752,7 @@ namespace gtsam { template<> struct traits : public Testable {}; /** - * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values + * @brief Helper function to compute the log(|2πΣ|) normalizer values * for a Gaussian noise model. * We compute this in the log-space for numerical accuracy. 
* diff --git a/gtsam/linear/tests/testNoiseModel.cpp b/gtsam/linear/tests/testNoiseModel.cpp index 725680501..49874f901 100644 --- a/gtsam/linear/tests/testNoiseModel.cpp +++ b/gtsam/linear/tests/testNoiseModel.cpp @@ -807,6 +807,26 @@ TEST(NoiseModel, NonDiagonalGaussian) } } +TEST(NoiseModel, ComputeLogNormalizer) { + // Very simple 1D noise model, which we can compute by hand. + double sigma = 0.1; + auto noise_model = Isotropic::Sigma(1, sigma); + double actual_value = ComputeLogNormalizer(noise_model); + // Compute log(|2πΣ|) by hand. + // = log(2π) + log(Σ) (since it is 1D) + constexpr double log2pi = 1.8378770664093454835606594728112; + double expected_value = log2pi + log(sigma * sigma); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + + // Similar situation in the 3D case + size_t n = 3; + auto noise_model2 = Isotropic::Sigma(n, sigma); + double actual_value2 = ComputeLogNormalizer(noise_model2); + // We multiply by 3 due to the determinant + double expected_value2 = n * (log2pi + log(sigma * sigma)); + EXPECT_DOUBLES_EQUAL(expected_value2, actual_value2, 1e-9); +} + /* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } /* ************************************************************************* */ From f9031f53b454ed2932693e2fbc2551e353df2850 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 16 Sep 2024 18:43:07 -0400 Subject: [PATCH 243/398] fix error function --- gtsam/hybrid/HybridNonlinearFactor.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 742b4c162..096c14f77 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -140,7 +140,7 @@ class HybridNonlinearFactor : public HybridFactor { auto errorFunc = [continuousValues](const std::pair& f) { auto [factor, val] = f; - return 
factor->error(continuousValues) + (0.5 * val * val); + return factor->error(continuousValues) + (0.5 * val); }; DecisionTree result(factors_, errorFunc); return result; @@ -159,7 +159,7 @@ class HybridNonlinearFactor : public HybridFactor { auto [factor, val] = factors_(discreteValues); // Compute the error for the selected factor const double factorError = factor->error(continuousValues); - return factorError + (0.5 * val * val); + return factorError + (0.5 * val); } /** From ef2ffd4115b6a40d1b2b2567cbf3d2060633bd12 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 16 Sep 2024 19:46:00 -0400 Subject: [PATCH 244/398] cleaner assignment in augment() --- gtsam/hybrid/HybridGaussianFactor.cpp | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index bfebb064e..cabe6defc 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -42,9 +42,11 @@ HybridGaussianFactor::Factors augment( const HybridGaussianFactor::FactorValuePairs &factors) { // Find the minimum value so we can "proselytize" to positive values. // Done because we can't have sqrt of negative numbers. 
- auto unzipped_pair = unzip(factors); - const HybridGaussianFactor::Factors gaussianFactors = unzipped_pair.first; - const AlgebraicDecisionTree valueTree = unzipped_pair.second; + HybridGaussianFactor::Factors gaussianFactors; + AlgebraicDecisionTree valueTree; + std::tie(gaussianFactors, valueTree) = unzip(factors); + + // Normalize double min_value = valueTree.min(); AlgebraicDecisionTree values = valueTree.apply([&min_value](double n) { return n - min_value; }); From 091352806b2ec648270d18c3ee21baeb44a72186 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 17 Sep 2024 13:37:44 -0400 Subject: [PATCH 245/398] update API to only allow a single DiscreteKey and vector of size the same as the discrete key cardinality --- gtsam/hybrid/HybridGaussianFactor.h | 10 +++++----- gtsam/hybrid/HybridNonlinearFactor.h | 9 +++++---- gtsam/hybrid/tests/Switching.h | 4 ++-- gtsam/hybrid/tests/testHybridEstimation.cpp | 18 ++++++------------ .../tests/testHybridGaussianFactorGraph.cpp | 4 ++-- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 6 +++--- .../tests/testHybridNonlinearFactorGraph.cpp | 8 ++++---- gtsam/hybrid/tests/testHybridNonlinearISAM.cpp | 6 +++--- gtsam/hybrid/tests/testSerializationHybrid.cpp | 4 ++-- 9 files changed, 32 insertions(+), 37 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 333c5d25b..5f9cd925e 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -96,15 +96,15 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * GaussianFactor shared pointers. * * @param continuousKeys Vector of keys for continuous factors. - * @param discreteKeys Vector of discrete keys. + * @param discreteKey The discrete key to index each component. * @param factors Vector of gaussian factor shared pointers - * and arbitrary scalars. + * and arbitrary scalars. Same size as the cardinality of discreteKey. 
*/ HybridGaussianFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, + const DiscreteKey &discreteKey, const std::vector &factors) - : HybridGaussianFactor(continuousKeys, discreteKeys, - FactorValuePairs(discreteKeys, factors)) {} + : HybridGaussianFactor(continuousKeys, {discreteKey}, + FactorValuePairs({discreteKey}, factors)) {} /// @} /// @name Testable diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 096c14f77..1120fc948 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -92,17 +92,18 @@ class HybridNonlinearFactor : public HybridFactor { * @tparam FACTOR The type of the factor shared pointers being passed in. * Will be typecast to NonlinearFactor shared pointers. * @param keys Vector of keys for continuous factors. - * @param discreteKeys Vector of discrete keys. + * @param discreteKey The discrete key indexing each component factor. * @param factors Vector of nonlinear factor and scalar pairs. + * Same size as the cardinality of discreteKey. * @param normalized Flag indicating if the factor error is already * normalized. 
*/ template HybridNonlinearFactor( - const KeyVector& keys, const DiscreteKeys& discreteKeys, + const KeyVector& keys, const DiscreteKey& discreteKey, const std::vector, double>>& factors, bool normalized = false) - : Base(keys, discreteKeys), normalized_(normalized) { + : Base(keys, {discreteKey}), normalized_(normalized) { std::vector nonlinear_factors; KeySet continuous_keys_set(keys.begin(), keys.end()); KeySet factor_keys_set; @@ -118,7 +119,7 @@ class HybridNonlinearFactor : public HybridFactor { "Factors passed into HybridNonlinearFactor need to be nonlinear!"); } } - factors_ = Factors(discreteKeys, nonlinear_factors); + factors_ = Factors({discreteKey}, nonlinear_factors); if (continuous_keys_set != factor_keys_set) { throw std::runtime_error( diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 4be3cff01..f18c19559 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -168,8 +168,8 @@ struct Switching { components.push_back( {std::dynamic_pointer_cast(f), 0.0}); } - nonlinearFactorGraph.emplace_shared( - keys, DiscreteKeys{modes[k]}, components); + nonlinearFactorGraph.emplace_shared(keys, modes[k], + components); } // Add measurement factors diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 7204d819d..ce70e41fa 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -437,8 +437,8 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() { std::make_shared>(X(0), X(1), 1, noise_model); std::vector components = {{zero_motion, 0.0}, {one_motion, 0.0}}; - nfg.emplace_shared(KeyVector{X(0), X(1)}, - DiscreteKeys{m}, components); + nfg.emplace_shared(KeyVector{X(0), X(1)}, m, + components); return nfg; } @@ -583,9 +583,6 @@ TEST(HybridEstimation, ModeSelection) { graph.emplace_shared>(X(0), 0.0, measurement_model); graph.emplace_shared>(X(1), 0.0, measurement_model); - 
DiscreteKeys modes; - modes.emplace_back(M(0), 2); - // The size of the noise model size_t d = 1; double noise_tight = 0.5, noise_loose = 5.0; @@ -594,11 +591,11 @@ TEST(HybridEstimation, ModeSelection) { X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_loose)), model1 = std::make_shared( X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight)); - std::vector components = {{model0, 0.0}, {model1, 0.0}}; KeyVector keys = {X(0), X(1)}; + DiscreteKey modes(M(0), 2); HybridNonlinearFactor mf(keys, modes, components); initial.insert(X(0), 0.0); @@ -617,7 +614,7 @@ TEST(HybridEstimation, ModeSelection) { /**************************************************************/ HybridBayesNet bn; - const DiscreteKey mode{M(0), 2}; + const DiscreteKey mode(M(0), 2); bn.push_back( GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1)); @@ -648,7 +645,7 @@ TEST(HybridEstimation, ModeSelection2) { double noise_tight = 0.5, noise_loose = 5.0; HybridBayesNet bn; - const DiscreteKey mode{M(0), 2}; + const DiscreteKey mode(M(0), 2); bn.push_back( GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1)); @@ -679,18 +676,15 @@ TEST(HybridEstimation, ModeSelection2) { graph.emplace_shared>(X(0), Z_3x1, measurement_model); graph.emplace_shared>(X(1), Z_3x1, measurement_model); - DiscreteKeys modes; - modes.emplace_back(M(0), 2); - auto model0 = std::make_shared>( X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_loose)), model1 = std::make_shared>( X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight)); - std::vector components = {{model0, 0.0}, {model1, 0.0}}; KeyVector keys = {X(0), X(1)}; + DiscreteKey modes(M(0), 2); HybridNonlinearFactor mf(keys, modes, components); initial.insert(X(0), Z_3x1); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 122f905ff..75ea80029 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ 
b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -181,7 +181,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { std::vector factors = { {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, {std::make_shared(X(1), I_3x3, Vector3::Ones()), 0.0}}; - hfg.add(HybridGaussianFactor({X(1)}, {{M(1), 2}}, factors)); + hfg.add(HybridGaussianFactor({X(1)}, {M(1), 2}, factors)); hfg.add(DecisionTreeFactor(m1, {2, 8})); // TODO(Varun) Adding extra discrete variable not connected to continuous @@ -241,7 +241,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { std::vector factors = { {std::make_shared(X(0), I_3x3, Z_3x1), 0.0}, {std::make_shared(X(0), I_3x3, Vector3::Ones()), 0.0}}; - hfg.add(HybridGaussianFactor({X(0)}, {{M(0), 2}}, factors)); + hfg.add(HybridGaussianFactor({X(0)}, {M(0), 2}, factors)); DecisionTree dt1( M(1), {std::make_shared(X(2), I_3x3, Z_3x1), 0.0}, diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index cf983254a..e2a10a783 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -423,7 +423,7 @@ TEST(HybridGaussianISAM, NonTrivial) { std::vector> components = { {moving, 0.0}, {still, 0.0}}; auto mixtureFactor = std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + contKeys, gtsam::DiscreteKey(M(1), 2), components); fg.push_back(mixtureFactor); // Add equivalent of ImuFactor @@ -463,7 +463,7 @@ TEST(HybridGaussianISAM, NonTrivial) { std::make_shared(W(1), W(2), odometry, noise_model); components = {{moving, 0.0}, {still, 0.0}}; mixtureFactor = std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); + contKeys, gtsam::DiscreteKey(M(2), 2), components); fg.push_back(mixtureFactor); // Add equivalent of ImuFactor @@ -506,7 +506,7 @@ TEST(HybridGaussianISAM, NonTrivial) { std::make_shared(W(2), W(3), odometry, noise_model); components = 
{{moving, 0.0}, {still, 0.0}}; mixtureFactor = std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); + contKeys, gtsam::DiscreteKey(M(3), 2), components); fg.push_back(mixtureFactor); // Add equivalent of ImuFactor diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 00d0d3a39..3631ac44e 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -135,7 +135,7 @@ TEST(HybridGaussianFactorGraph, Resize) { std::vector> components = { {still, 0.0}, {moving, 0.0}}; auto dcFactor = std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + contKeys, gtsam::DiscreteKey(M(1), 2), components); nhfg.push_back(dcFactor); Values linearizationPoint; @@ -170,12 +170,12 @@ TEST(HybridGaussianFactorGraph, HybridNonlinearFactor) { // Check for exception when number of continuous keys are under-specified. KeyVector contKeys = {X(0)}; THROWS_EXCEPTION(std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components)); + contKeys, gtsam::DiscreteKey(M(1), 2), components)); // Check for exception when number of continuous keys are too many. contKeys = {X(0), X(1), X(2)}; THROWS_EXCEPTION(std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components)); + contKeys, gtsam::DiscreteKey(M(1), 2), components)); } /***************************************************************************** @@ -807,7 +807,7 @@ TEST(HybridFactorGraph, DefaultDecisionTree) { std::vector> motion_models = {{still, 0.0}, {moving, 0.0}}; fg.emplace_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, motion_models); + contKeys, gtsam::DiscreteKey(M(1), 2), motion_models); // Add Range-Bearing measurements to from X0 to L0 and X1 to L1. 
// create a noise model for the landmark measurements diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index 7519162eb..6932508df 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -442,7 +442,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { std::vector> components = { {moving, 0.0}, {still, 0.0}}; auto mixtureFactor = std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + contKeys, gtsam::DiscreteKey(M(1), 2), components); fg.push_back(mixtureFactor); // Add equivalent of ImuFactor @@ -482,7 +482,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { std::make_shared(W(1), W(2), odometry, noise_model); components = {{moving, 0.0}, {still, 0.0}}; mixtureFactor = std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); + contKeys, gtsam::DiscreteKey(M(2), 2), components); fg.push_back(mixtureFactor); // Add equivalent of ImuFactor @@ -525,7 +525,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { std::make_shared(W(2), W(3), odometry, noise_model); components = {{moving, 0.0}, {still, 0.0}}; mixtureFactor = std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); + contKeys, gtsam::DiscreteKey(M(3), 2), components); fg.push_back(mixtureFactor); // Add equivalent of ImuFactor diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 74cfa72e6..55f625992 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -76,7 +76,7 @@ BOOST_CLASS_EXPORT_GUID(HybridBayesNet, "gtsam_HybridBayesNet"); // Test HybridGaussianFactor serialization. 
TEST(HybridSerialization, HybridGaussianFactor) { KeyVector continuousKeys{X(0)}; - DiscreteKeys discreteKeys{{M(0), 2}}; + DiscreteKey discreteKey{M(0), 2}; auto A = Matrix::Zero(2, 1); auto b0 = Matrix::Zero(2, 1); @@ -85,7 +85,7 @@ TEST(HybridSerialization, HybridGaussianFactor) { auto f1 = std::make_shared(X(0), A, b1); std::vector factors{{f0, 0.0}, {f1, 0.0}}; - const HybridGaussianFactor factor(continuousKeys, discreteKeys, factors); + const HybridGaussianFactor factor(continuousKeys, discreteKey, factors); EXPECT(equalsObj(factor)); EXPECT(equalsXML(factor)); From d4923dbfa9012beaf8d33f25550f0c3c7885d4de Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 17 Sep 2024 13:58:59 -0400 Subject: [PATCH 246/398] Use DecisionTree for constructing HybridGaussianConditional --- gtsam/hybrid/HybridGaussianConditional.cpp | 18 ------- gtsam/hybrid/HybridGaussianConditional.h | 28 +---------- gtsam/hybrid/tests/TinyHybridExample.h | 9 ++-- gtsam/hybrid/tests/testHybridBayesNet.cpp | 12 +++-- gtsam/hybrid/tests/testHybridEstimation.cpp | 24 ++++++---- .../tests/testHybridGaussianConditional.cpp | 8 +++- .../hybrid/tests/testHybridGaussianFactor.cpp | 10 +++- .../tests/testHybridGaussianFactorGraph.cpp | 47 ++++++++++++------- .../hybrid/tests/testSerializationHybrid.cpp | 3 +- 9 files changed, 76 insertions(+), 83 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 2b2062e6f..8fce251cc 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -55,24 +55,6 @@ HybridGaussianConditional::conditionals() const { return conditionals_; } -/* *******************************************************************************/ -HybridGaussianConditional::HybridGaussianConditional( - KeyVector &&continuousFrontals, KeyVector &&continuousParents, - DiscreteKeys &&discreteParents, - std::vector &&conditionals) - : HybridGaussianConditional(continuousFrontals, 
continuousParents, - discreteParents, - Conditionals(discreteParents, conditionals)) {} - -/* *******************************************************************************/ -HybridGaussianConditional::HybridGaussianConditional( - const KeyVector &continuousFrontals, const KeyVector &continuousParents, - const DiscreteKeys &discreteParents, - const std::vector &conditionals) - : HybridGaussianConditional(continuousFrontals, continuousParents, - discreteParents, - Conditionals(discreteParents, conditionals)) {} - /* *******************************************************************************/ // TODO(dellaert): This is copy/paste: HybridGaussianConditional should be // derived from HybridGaussianFactor, no? diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index fc315c6f9..2c132d3bf 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -106,32 +106,6 @@ class GTSAM_EXPORT HybridGaussianConditional const DiscreteKeys &discreteParents, const Conditionals &conditionals); - /** - * @brief Make a Gaussian Mixture from a list of Gaussian conditionals - * - * @param continuousFrontals The continuous frontal variables - * @param continuousParents The continuous parent variables - * @param discreteParents Discrete parents variables - * @param conditionals List of conditionals - */ - HybridGaussianConditional( - KeyVector &&continuousFrontals, KeyVector &&continuousParents, - DiscreteKeys &&discreteParents, - std::vector &&conditionals); - - /** - * @brief Make a Gaussian Mixture from a list of Gaussian conditionals - * - * @param continuousFrontals The continuous frontal variables - * @param continuousParents The continuous parent variables - * @param discreteParents Discrete parents variables - * @param conditionals List of conditionals - */ - HybridGaussianConditional( - const KeyVector &continuousFrontals, const KeyVector &continuousParents, - const DiscreteKeys 
&discreteParents, - const std::vector &conditionals); - /// @} /// @name Testable /// @{ @@ -273,7 +247,7 @@ class GTSAM_EXPORT HybridGaussianConditional #endif }; -/// Return the DiscreteKey vector as a set. +/// Return the DiscreteKeys vector as a set. std::set DiscreteKeysAsSet(const DiscreteKeys &discreteKeys); // traits diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index 00af28308..b2b944f91 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -43,12 +43,13 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1, // Create Gaussian mixture z_i = x0 + noise for each measurement. for (size_t i = 0; i < num_measurements; i++) { const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode; + DiscreteKeys modes{mode_i}; + std::vector conditionals{ + GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 0.5), + GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 3)}; bayesNet.emplace_shared( KeyVector{Z(i)}, KeyVector{X(0)}, DiscreteKeys{mode_i}, - std::vector{GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), - Z_1x1, 0.5), - GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), - Z_1x1, 3)}); + HybridGaussianConditional::Conditionals(modes, conditionals)); } // Create prior on X(0). diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 8b48c14b6..462ce8625 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -107,9 +107,11 @@ TEST(HybridBayesNet, evaluateHybrid) { // Create hybrid Bayes net. 
HybridBayesNet bayesNet; bayesNet.push_back(continuousConditional); + DiscreteKeys discreteParents{Asia}; bayesNet.emplace_shared( - KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia}, - std::vector{conditional0, conditional1}); + KeyVector{X(1)}, KeyVector{}, discreteParents, + HybridGaussianConditional::Conditionals( + discreteParents, std::vector{conditional0, conditional1})); bayesNet.emplace_shared(Asia, "99/1"); // Create values at which to evaluate. @@ -168,9 +170,11 @@ TEST(HybridBayesNet, Error) { conditional1 = std::make_shared( X(1), Vector1::Constant(2), I_1x1, model1); + DiscreteKeys discreteParents{Asia}; auto gm = std::make_shared( - KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia}, - std::vector{conditional0, conditional1}); + KeyVector{X(1)}, KeyVector{}, discreteParents, + HybridGaussianConditional::Conditionals( + discreteParents, std::vector{conditional0, conditional1})); // Create hybrid Bayes net. HybridBayesNet bayesNet; bayesNet.push_back(continuousConditional); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index ce70e41fa..ee48ad5d8 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -620,12 +620,16 @@ TEST(HybridEstimation, ModeSelection) { GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1)); bn.push_back( GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1)); + + std::vector conditionals{ + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1), + Z_1x1, noise_loose), + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1), + Z_1x1, noise_tight)}; bn.emplace_shared( KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode}, - std::vector{GaussianConditional::sharedMeanAndStddev( - Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_loose), - GaussianConditional::sharedMeanAndStddev( - Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_tight)}); + 
HybridGaussianConditional::Conditionals(DiscreteKeys{mode}, + conditionals)); VectorValues vv; vv.insert(Z(0), Z_1x1); @@ -651,12 +655,16 @@ TEST(HybridEstimation, ModeSelection2) { GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1)); bn.push_back( GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1)); + + std::vector conditionals{ + GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1), + Z_3x1, noise_loose), + GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1), + Z_3x1, noise_tight)}; bn.emplace_shared( KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode}, - std::vector{GaussianConditional::sharedMeanAndStddev( - Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_loose), - GaussianConditional::sharedMeanAndStddev( - Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_tight)}); + HybridGaussianConditional::Conditionals(DiscreteKeys{mode}, + conditionals)); VectorValues vv; vv.insert(Z(0), Z_3x1); diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index 6156fee96..d01158413 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -52,7 +52,9 @@ const std::vector conditionals{ commonSigma), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), commonSigma)}; -const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals); +const HybridGaussianConditional mixture( + {Z(0)}, {X(0)}, {mode}, + HybridGaussianConditional::Conditionals({mode}, conditionals)); } // namespace equal_constants /* ************************************************************************* */ @@ -153,7 +155,9 @@ const std::vector conditionals{ 0.5), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 3.0)}; -const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals); +const HybridGaussianConditional 
mixture( + {Z(0)}, {X(0)}, {mode}, + HybridGaussianConditional::Conditionals({mode}, conditionals)); } // namespace mode_dependent_constants /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 11226e19d..850a1e4d1 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -233,8 +233,11 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, c1 = make_shared(z, Vector1(mu1), I_1x1, model1); HybridBayesNet hbn; + DiscreteKeys discreteParents{m}; hbn.emplace_shared( - KeyVector{z}, KeyVector{}, DiscreteKeys{m}, std::vector{c0, c1}); + KeyVector{z}, KeyVector{}, discreteParents, + HybridGaussianConditional::Conditionals(discreteParents, + std::vector{c0, c1})); auto mixing = make_shared(m, "50/50"); hbn.push_back(mixing); @@ -408,8 +411,11 @@ static HybridGaussianConditional::shared_ptr CreateHybridMotionModel( -I_1x1, model0), c1 = make_shared(X(1), Vector1(mu1), I_1x1, X(0), -I_1x1, model1); + DiscreteKeys discreteParents{m1}; return std::make_shared( - KeyVector{X(1)}, KeyVector{X(0)}, DiscreteKeys{m1}, std::vector{c0, c1}); + KeyVector{X(1)}, KeyVector{X(0)}, discreteParents, + HybridGaussianConditional::Conditionals(discreteParents, + std::vector{c0, c1})); } /// Create two state Bayes network with 1 or two measurement models diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 75ea80029..dc88c8b7e 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -682,8 +682,11 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) { x0, -I_1x1, model0), c1 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, x0, -I_1x1, model1); + DiscreteKeys discreteParents{m1}; hbn.emplace_shared( - KeyVector{f01}, 
KeyVector{x0, x1}, DiscreteKeys{m1}, std::vector{c0, c1}); + KeyVector{f01}, KeyVector{x0, x1}, discreteParents, + HybridGaussianConditional::Conditionals(discreteParents, + std::vector{c0, c1})); // Discrete uniform prior. hbn.emplace_shared(m1, "0.5/0.5"); @@ -806,9 +809,11 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { X(0), Vector1(14.1421), I_1x1 * 2.82843), conditional1 = std::make_shared( X(0), Vector1(10.1379), I_1x1 * 2.02759); + DiscreteKeys discreteParents{mode}; expectedBayesNet.emplace_shared( - KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, - std::vector{conditional0, conditional1}); + KeyVector{X(0)}, KeyVector{}, discreteParents, + HybridGaussianConditional::Conditionals( + discreteParents, std::vector{conditional0, conditional1})); // Add prior on mode. expectedBayesNet.emplace_shared(mode, "74/26"); @@ -831,12 +836,13 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { HybridBayesNet bn; // Create Gaussian mixture z_0 = x0 + noise for each measurement. + std::vector conditionals{ + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3), + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 0.5)}; auto gm = std::make_shared( KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode}, - std::vector{ - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3), - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, - 0.5)}); + HybridGaussianConditional::Conditionals(DiscreteKeys{mode}, + conditionals)); bn.push_back(gm); // Create prior on X(0). @@ -865,7 +871,8 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { X(0), Vector1(14.1421), I_1x1 * 2.82843); expectedBayesNet.emplace_shared( KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, - std::vector{conditional0, conditional1}); + HybridGaussianConditional::Conditionals( + DiscreteKeys{mode}, std::vector{conditional0, conditional1})); // Add prior on mode. 
expectedBayesNet.emplace_shared(mode, "1/1"); @@ -902,7 +909,8 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) { X(0), Vector1(10.274), I_1x1 * 2.0548); expectedBayesNet.emplace_shared( KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, - std::vector{conditional0, conditional1}); + HybridGaussianConditional::Conditionals( + DiscreteKeys{mode}, std::vector{conditional0, conditional1})); // Add prior on mode. expectedBayesNet.emplace_shared(mode, "23/77"); @@ -947,12 +955,14 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { for (size_t t : {0, 1, 2}) { // Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t): const auto noise_mode_t = DiscreteKey{N(t), 2}; + std::vector conditionals{ + GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1, 0.5), + GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1, + 3.0)}; bn.emplace_shared( KeyVector{Z(t)}, KeyVector{X(t)}, DiscreteKeys{noise_mode_t}, - std::vector{GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), - Z_1x1, 0.5), - GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), - Z_1x1, 3.0)}); + HybridGaussianConditional::Conditionals(DiscreteKeys{noise_mode_t}, + conditionals)); // Create prior on discrete mode N(t): bn.emplace_shared(noise_mode_t, "20/80"); @@ -962,12 +972,15 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { for (size_t t : {2, 1}) { // Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1): const auto motion_model_t = DiscreteKey{M(t), 2}; + std::vector conditionals{ + GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), Z_1x1, + 0.2), + GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), I_1x1, + 0.2)}; auto gm = std::make_shared( KeyVector{X(t)}, KeyVector{X(t - 1)}, DiscreteKeys{motion_model_t}, - std::vector{GaussianConditional::sharedMeanAndStddev( - X(t), I_1x1, X(t - 1), Z_1x1, 0.2), - GaussianConditional::sharedMeanAndStddev( - X(t), I_1x1, X(t - 1), I_1x1, 0.2)}); + 
HybridGaussianConditional::Conditionals(DiscreteKeys{motion_model_t}, + conditionals)); bn.push_back(gm); // Create prior on motion model M(t): diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 55f625992..701b870f1 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -116,7 +116,8 @@ TEST(HybridSerialization, HybridGaussianConditional) { const auto conditional1 = std::make_shared( GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3)); const HybridGaussianConditional gm({Z(0)}, {X(0)}, {mode}, - {conditional0, conditional1}); + HybridGaussianConditional::Conditionals( + {mode}, {conditional0, conditional1})); EXPECT(equalsObj(gm)); EXPECT(equalsXML(gm)); From a276affe00e095f7f69e94b7ad9fbf2ba50ec0a1 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 17 Sep 2024 14:20:46 -0400 Subject: [PATCH 247/398] fix wrapper --- gtsam/hybrid/hybrid.i | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index dcf0fce04..8b825e482 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -76,7 +76,7 @@ virtual class HybridConditional { class HybridGaussianFactor : gtsam::HybridFactor { HybridGaussianFactor( const gtsam::KeyVector& continuousKeys, - const gtsam::DiscreteKeys& discreteKeys, + const gtsam::DiscreteKey& discreteKey, const std::vector>& factorsList); @@ -91,8 +91,7 @@ class HybridGaussianConditional : gtsam::HybridFactor { const gtsam::KeyVector& continuousFrontals, const gtsam::KeyVector& continuousParents, const gtsam::DiscreteKeys& discreteParents, - const std::vector& - conditionalsList); + const gtsam::HybridGaussianConditional::Conditionals& conditionals); gtsam::HybridGaussianFactor* likelihood( const gtsam::VectorValues& frontals) const; @@ -248,7 +247,7 @@ class HybridNonlinearFactor : gtsam::HybridFactor { bool normalized = false); 
HybridNonlinearFactor( - const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, + const gtsam::KeyVector& keys, const gtsam::DiscreteKey& discreteKey, const std::vector>& factors, bool normalized = false); From dbd0ae1f272e6da6d84f457ea1106debd27ba5d6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 17 Sep 2024 14:51:24 -0400 Subject: [PATCH 248/398] reintroduce vector based HybridGaussianConditional constructor --- gtsam/hybrid/HybridGaussianConditional.cpp | 9 +++++++++ gtsam/hybrid/HybridGaussianConditional.h | 16 +++++++++++++++- gtsam/hybrid/hybrid.i | 5 +++++ 3 files changed, 29 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 8fce251cc..a8ed7eed1 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -55,6 +55,15 @@ HybridGaussianConditional::conditionals() const { return conditionals_; } +/* *******************************************************************************/ +HybridGaussianConditional::HybridGaussianConditional( + const KeyVector &continuousFrontals, const KeyVector &continuousParents, + const DiscreteKeys &discreteParents, + const std::vector &conditionals) + : HybridGaussianConditional(continuousFrontals, continuousParents, + discreteParents, + Conditionals(discreteParents, conditionals)) {} + /* *******************************************************************************/ // TODO(dellaert): This is copy/paste: HybridGaussianConditional should be // derived from HybridGaussianFactor, no? 
diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 2c132d3bf..9cd13cbec 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -106,6 +106,20 @@ class GTSAM_EXPORT HybridGaussianConditional const DiscreteKeys &discreteParents, const Conditionals &conditionals); + /** + * @brief Make a Gaussian Mixture from a vector of Gaussian conditionals. + * The DecisionTree-based constructor is preferred over this one. + * + * @param continuousFrontals The continuous frontal variables + * @param continuousParents The continuous parent variables + * @param discreteParents Discrete parents variables + * @param conditionals Vector of conditionals + */ + HybridGaussianConditional( + const KeyVector &continuousFrontals, const KeyVector &continuousParents, + const DiscreteKeys &discreteParents, + const std::vector &conditionals); + /// @} /// @name Testable /// @{ @@ -247,7 +261,7 @@ class GTSAM_EXPORT HybridGaussianConditional #endif }; -/// Return the DiscreteKeys vector as a set. +/// Return the DiscreteKey vector as a set. 
std::set DiscreteKeysAsSet(const DiscreteKeys &discreteKeys); // traits diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 8b825e482..485d10848 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -92,6 +92,11 @@ class HybridGaussianConditional : gtsam::HybridFactor { const gtsam::KeyVector& continuousParents, const gtsam::DiscreteKeys& discreteParents, const gtsam::HybridGaussianConditional::Conditionals& conditionals); + HybridGaussianConditional( + const gtsam::KeyVector& continuousFrontals, + const gtsam::KeyVector& continuousParents, + const gtsam::DiscreteKeys& discreteParents, + const std::vector& conditionals); gtsam::HybridGaussianFactor* likelihood( const gtsam::VectorValues& frontals) const; From c1ebdb200c9f1f8c5d6c59ef271d6e86b61e2650 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 17 Sep 2024 14:53:31 -0400 Subject: [PATCH 249/398] update python tests --- python/gtsam/tests/test_HybridFactorGraph.py | 16 ++++++---------- .../tests/test_HybridNonlinearFactorGraph.py | 4 +--- 2 files changed, 7 insertions(+), 13 deletions(-) diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index adbda59ce..ce71d75ed 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -20,7 +20,7 @@ import gtsam from gtsam import (DiscreteConditional, DiscreteKeys, GaussianConditional, HybridBayesNet, HybridGaussianConditional, HybridGaussianFactor, HybridGaussianFactorGraph, - HybridValues, JacobianFactor, Ordering, noiseModel) + HybridValues, JacobianFactor, noiseModel) DEBUG_MARGINALS = False @@ -31,13 +31,11 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): def test_create(self): """Test construction of hybrid factor graph.""" model = noiseModel.Unit.Create(3) - dk = DiscreteKeys() - dk.push_back((C(0), 2)) jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model) jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model) 
- gmf = HybridGaussianFactor([X(0)], dk, [(jf1, 0), (jf2, 0)]) + gmf = HybridGaussianFactor([X(0)], (C(0), 2), [(jf1, 0), (jf2, 0)]) hfg = HybridGaussianFactorGraph() hfg.push_back(jf1) @@ -58,13 +56,11 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): def test_optimize(self): """Test construction of hybrid factor graph.""" model = noiseModel.Unit.Create(3) - dk = DiscreteKeys() - dk.push_back((C(0), 2)) jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model) jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model) - gmf = HybridGaussianFactor([X(0)], dk, [(jf1, 0), (jf2, 0)]) + gmf = HybridGaussianFactor([X(0)], (C(0), 2), [(jf1, 0), (jf2, 0)]) hfg = HybridGaussianFactorGraph() hfg.push_back(jf1) @@ -96,8 +92,6 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): # Create Gaussian mixture Z(0) = X(0) + noise for each measurement. I_1x1 = np.eye(1) - keys = DiscreteKeys() - keys.push_back(mode) for i in range(num_measurements): conditional0 = GaussianConditional.FromMeanAndStddev(Z(i), I_1x1, @@ -107,8 +101,10 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): I_1x1, X(0), [0], sigma=3) + discreteParents = DiscreteKeys() + discreteParents.push_back(mode) bayesNet.push_back( - HybridGaussianConditional([Z(i)], [X(0)], keys, + HybridGaussianConditional([Z(i)], [X(0)], discreteParents, [conditional0, conditional1])) # Create prior on X(0). 
diff --git a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py index 247f83c19..4a1abdcf3 100644 --- a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py +++ b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py @@ -27,8 +27,6 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): def test_nonlinear_hybrid(self): nlfg = gtsam.HybridNonlinearFactorGraph() - dk = gtsam.DiscreteKeys() - dk.push_back((10, 2)) nlfg.push_back( BetweenFactorPoint3(1, 2, Point3(1, 2, 3), noiseModel.Diagonal.Variances([1, 1, 1]))) @@ -40,7 +38,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): noiseModel.Unit.Create(3)), 0.0), (PriorFactorPoint3(1, Point3(1, 2, 1), noiseModel.Unit.Create(3)), 0.0)] - nlfg.push_back(gtsam.HybridNonlinearFactor([1], dk, factors)) + nlfg.push_back(gtsam.HybridNonlinearFactor([1], (10, 2), factors)) nlfg.push_back(gtsam.DecisionTreeFactor((10, 2), "1 3")) values = gtsam.Values() values.insert_point3(1, Point3(0, 0, 0)) From 4feec4ddaf070e0f7f0974ca458a1ad50f4ed386 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 18 Sep 2024 04:18:23 -0400 Subject: [PATCH 250/398] rename to Category and put inside HybridFactor class --- gtsam/hybrid/HybridFactor.cpp | 26 ++++++++++++-------------- gtsam/hybrid/HybridFactor.h | 15 ++++++++------- 2 files changed, 20 insertions(+), 21 deletions(-) diff --git a/gtsam/hybrid/HybridFactor.cpp b/gtsam/hybrid/HybridFactor.cpp index 5582166a3..3338951bf 100644 --- a/gtsam/hybrid/HybridFactor.cpp +++ b/gtsam/hybrid/HybridFactor.cpp @@ -50,22 +50,20 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1, /* ************************************************************************ */ HybridFactor::HybridFactor(const KeyVector &keys) - : Base(keys), - category_(HybridCategory::Continuous), - continuousKeys_(keys) {} + : Base(keys), category_(Category::Continuous), continuousKeys_(keys) {} /* 
************************************************************************ */ -HybridCategory GetCategory(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys) { +HybridFactor::Category GetCategory(const KeyVector &continuousKeys, + const DiscreteKeys &discreteKeys) { if ((continuousKeys.size() == 0) && (discreteKeys.size() != 0)) { - return HybridCategory::Discrete; + return HybridFactor::Category::Discrete; } else if ((continuousKeys.size() != 0) && (discreteKeys.size() == 0)) { - return HybridCategory::Continuous; + return HybridFactor::Category::Continuous; } else if ((continuousKeys.size() != 0) && (discreteKeys.size() != 0)) { - return HybridCategory::Hybrid; + return HybridFactor::Category::Hybrid; } else { // Case where we have no keys. Should never happen. - return HybridCategory::None; + return HybridFactor::Category::None; } } @@ -80,7 +78,7 @@ HybridFactor::HybridFactor(const KeyVector &continuousKeys, /* ************************************************************************ */ HybridFactor::HybridFactor(const DiscreteKeys &discreteKeys) : Base(CollectKeys({}, discreteKeys)), - category_(HybridCategory::Discrete), + category_(Category::Discrete), discreteKeys_(discreteKeys), continuousKeys_({}) {} @@ -97,16 +95,16 @@ void HybridFactor::print(const std::string &s, const KeyFormatter &formatter) const { std::cout << (s.empty() ? 
"" : s + "\n"); switch (category_) { - case HybridCategory::Continuous: + case Category::Continuous: std::cout << "Continuous "; break; - case HybridCategory::Discrete: + case Category::Discrete: std::cout << "Discrete "; break; - case HybridCategory::Hybrid: + case Category::Hybrid: std::cout << "Hybrid "; break; - case HybridCategory::None: + case Category::None: std::cout << "None "; break; } diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index d0b9bbabe..ad29dfdca 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -41,9 +41,6 @@ KeyVector CollectKeys(const KeyVector &keys1, const KeyVector &keys2); DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1, const DiscreteKeys &key2); -/// Enum to help with categorizing hybrid factors. -enum class HybridCategory { None, Discrete, Continuous, Hybrid }; - /** * Base class for *truly* hybrid probabilistic factors * @@ -55,9 +52,13 @@ enum class HybridCategory { None, Discrete, Continuous, Hybrid }; * @ingroup hybrid */ class GTSAM_EXPORT HybridFactor : public Factor { + public: + /// Enum to help with categorizing hybrid factors. + enum class Category { None, Discrete, Continuous, Hybrid }; + private: /// Record what category of HybridFactor this is. - HybridCategory category_ = HybridCategory::None; + Category category_ = Category::None; protected: // Set of DiscreteKeys for this factor. @@ -118,13 +119,13 @@ class GTSAM_EXPORT HybridFactor : public Factor { /// @{ /// True if this is a factor of discrete variables only. - bool isDiscrete() const { return category_ == HybridCategory::Discrete; } + bool isDiscrete() const { return category_ == Category::Discrete; } /// True if this is a factor of continuous variables only. - bool isContinuous() const { return category_ == HybridCategory::Continuous; } + bool isContinuous() const { return category_ == Category::Continuous; } /// True is this is a Discrete-Continuous factor. 
- bool isHybrid() const { return category_ == HybridCategory::Hybrid; } + bool isHybrid() const { return category_ == Category::Hybrid; } /// Return the number of continuous variables in this factor. size_t nrContinuous() const { return continuousKeys_.size(); } From fa353840b3d0dd16574cf5dadce5a11d6da4ff2c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 18 Sep 2024 15:08:09 -0400 Subject: [PATCH 251/398] update HybridGaussianConditional to use single discrete parent --- gtsam/hybrid/HybridGaussianConditional.cpp | 6 +++--- gtsam/hybrid/HybridGaussianConditional.h | 7 ++++--- gtsam/hybrid/hybrid.i | 2 +- gtsam/hybrid/tests/TinyHybridExample.h | 4 +--- gtsam/hybrid/tests/testHybridBayesNet.cpp | 21 ++++++++------------ python/gtsam/tests/test_HybridBayesNet.py | 4 +--- python/gtsam/tests/test_HybridFactorGraph.py | 4 +--- 7 files changed, 19 insertions(+), 29 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index a8ed7eed1..4c2739c86 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -58,11 +58,11 @@ HybridGaussianConditional::conditionals() const { /* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( const KeyVector &continuousFrontals, const KeyVector &continuousParents, - const DiscreteKeys &discreteParents, + const DiscreteKey &discreteParent, const std::vector &conditionals) : HybridGaussianConditional(continuousFrontals, continuousParents, - discreteParents, - Conditionals(discreteParents, conditionals)) {} + DiscreteKeys{discreteParent}, + Conditionals({discreteParent}, conditionals)) {} /* *******************************************************************************/ // TODO(dellaert): This is copy/paste: HybridGaussianConditional should be diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 
9cd13cbec..240160744 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -112,12 +112,13 @@ class GTSAM_EXPORT HybridGaussianConditional * * @param continuousFrontals The continuous frontal variables * @param continuousParents The continuous parent variables - * @param discreteParents Discrete parents variables - * @param conditionals Vector of conditionals + * @param discreteParent Single discrete parent variable + * @param conditionals Vector of conditionals with the same size as the + * cardinality of the discrete parent. */ HybridGaussianConditional( const KeyVector &continuousFrontals, const KeyVector &continuousParents, - const DiscreteKeys &discreteParents, + const DiscreteKey &discreteParent, const std::vector &conditionals); /// @} diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 485d10848..37bcefe81 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -95,7 +95,7 @@ class HybridGaussianConditional : gtsam::HybridFactor { HybridGaussianConditional( const gtsam::KeyVector& continuousFrontals, const gtsam::KeyVector& continuousParents, - const gtsam::DiscreteKeys& discreteParents, + const gtsam::DiscreteKey& discreteParent, const std::vector& conditionals); gtsam::HybridGaussianFactor* likelihood( diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index b2b944f91..b3bb14c8f 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -43,13 +43,11 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1, // Create Gaussian mixture z_i = x0 + noise for each measurement. for (size_t i = 0; i < num_measurements; i++) { const auto mode_i = manyModes ? 
DiscreteKey{M(i), 2} : mode; - DiscreteKeys modes{mode_i}; std::vector conditionals{ GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 0.5), GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 3)}; bayesNet.emplace_shared( - KeyVector{Z(i)}, KeyVector{X(0)}, DiscreteKeys{mode_i}, - HybridGaussianConditional::Conditionals(modes, conditionals)); + KeyVector{Z(i)}, KeyVector{X(0)}, mode_i, conditionals); } // Create prior on X(0). diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 462ce8625..bbe2d3df5 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -107,11 +107,9 @@ TEST(HybridBayesNet, evaluateHybrid) { // Create hybrid Bayes net. HybridBayesNet bayesNet; bayesNet.push_back(continuousConditional); - DiscreteKeys discreteParents{Asia}; bayesNet.emplace_shared( - KeyVector{X(1)}, KeyVector{}, discreteParents, - HybridGaussianConditional::Conditionals( - discreteParents, std::vector{conditional0, conditional1})); + KeyVector{X(1)}, KeyVector{}, Asia, + std::vector{conditional0, conditional1}); bayesNet.emplace_shared(Asia, "99/1"); // Create values at which to evaluate. @@ -170,11 +168,9 @@ TEST(HybridBayesNet, Error) { conditional1 = std::make_shared( X(1), Vector1::Constant(2), I_1x1, model1); - DiscreteKeys discreteParents{Asia}; auto gm = std::make_shared( - KeyVector{X(1)}, KeyVector{}, discreteParents, - HybridGaussianConditional::Conditionals( - discreteParents, std::vector{conditional0, conditional1})); + KeyVector{X(1)}, KeyVector{}, Asia, + std::vector{conditional0, conditional1}); // Create hybrid Bayes net. 
HybridBayesNet bayesNet; bayesNet.push_back(continuousConditional); @@ -393,11 +389,10 @@ TEST(HybridBayesNet, Sampling) { std::make_shared>(X(0), X(1), 0, noise_model); auto one_motion = std::make_shared>(X(0), X(1), 1, noise_model); - DiscreteKeys discreteKeys{DiscreteKey(M(0), 2)}; - HybridNonlinearFactor::Factors factors( - discreteKeys, {{zero_motion, 0.0}, {one_motion, 0.0}}); - nfg.emplace_shared(KeyVector{X(0), X(1)}, discreteKeys, - factors); + nfg.emplace_shared( + KeyVector{X(0), X(1)}, DiscreteKey(M(0), 2), + std::vector{{zero_motion, 0.0}, + {one_motion, 0.0}}); DiscreteKey mode(M(0), 2); nfg.emplace_shared(mode, "1/1"); diff --git a/python/gtsam/tests/test_HybridBayesNet.py b/python/gtsam/tests/test_HybridBayesNet.py index 6e50c2bfe..2f7dd4a98 100644 --- a/python/gtsam/tests/test_HybridBayesNet.py +++ b/python/gtsam/tests/test_HybridBayesNet.py @@ -43,14 +43,12 @@ class TestHybridBayesNet(GtsamTestCase): # Create the conditionals conditional0 = GaussianConditional(X(1), [5], I_1x1, model0) conditional1 = GaussianConditional(X(1), [2], I_1x1, model1) - discrete_keys = DiscreteKeys() - discrete_keys.push_back(Asia) # Create hybrid Bayes net. 
bayesNet = HybridBayesNet() bayesNet.push_back(conditional) bayesNet.push_back( - HybridGaussianConditional([X(1)], [], discrete_keys, + HybridGaussianConditional([X(1)], [], Asia, [conditional0, conditional1])) bayesNet.push_back(DiscreteConditional(Asia, "99/1")) diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index ce71d75ed..5eafc05ae 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -101,10 +101,8 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): I_1x1, X(0), [0], sigma=3) - discreteParents = DiscreteKeys() - discreteParents.push_back(mode) bayesNet.push_back( - HybridGaussianConditional([Z(i)], [X(0)], discreteParents, + HybridGaussianConditional([Z(i)], [X(0)], mode, [conditional0, conditional1])) # Create prior on X(0). From 4016de7942201448f676d94efa630241a140ad3c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 16 Sep 2024 14:33:24 -0400 Subject: [PATCH 252/398] update variables and docstrings to remove the mixture terminology --- gtsam/hybrid/HybridBayesNet.cpp | 6 +- gtsam/hybrid/HybridBayesNet.h | 3 +- gtsam/hybrid/HybridBayesTree.cpp | 4 +- gtsam/hybrid/HybridBayesTree.h | 6 +- gtsam/hybrid/HybridConditional.cpp | 10 +-- gtsam/hybrid/HybridConditional.h | 6 +- gtsam/hybrid/HybridGaussianConditional.cpp | 12 ++-- gtsam/hybrid/HybridGaussianConditional.h | 16 ++--- gtsam/hybrid/HybridGaussianFactor.h | 12 ++-- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 9 +-- gtsam/hybrid/HybridGaussianFactorGraph.h | 2 +- gtsam/hybrid/HybridNonlinearFactor.h | 5 +- gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 2 +- gtsam/hybrid/HybridSmoother.cpp | 2 +- gtsam/hybrid/HybridSmoother.h | 10 ++- gtsam/hybrid/tests/TinyHybridExample.h | 2 +- gtsam/hybrid/tests/testHybridConditional.cpp | 4 +- gtsam/hybrid/tests/testHybridEstimation.cpp | 2 +- gtsam/hybrid/tests/testHybridFactorGraph.cpp | 2 +- .../tests/testHybridGaussianConditional.cpp | 67 
++++++++++--------- .../hybrid/tests/testHybridGaussianFactor.cpp | 28 ++++---- .../tests/testHybridGaussianFactorGraph.cpp | 29 ++++---- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 9 +-- .../tests/testHybridNonlinearFactor.cpp | 14 ++-- .../tests/testHybridNonlinearFactorGraph.cpp | 20 +++--- .../hybrid/tests/testHybridNonlinearISAM.cpp | 9 +-- python/gtsam/tests/test_HybridFactorGraph.py | 8 +-- 27 files changed, 153 insertions(+), 146 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 14a54c0b4..b24606640 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -49,7 +49,7 @@ std::function &, double)> prunerFunc( const DecisionTreeFactor &prunedDiscreteProbs, const HybridConditional &conditional) { // Get the discrete keys as sets for the decision tree - // and the Gaussian mixture. + // and the hybrid Gaussian conditional. std::set discreteProbsKeySet = DiscreteKeysAsSet(prunedDiscreteProbs.discreteKeys()); std::set conditionalKeySet = @@ -63,7 +63,7 @@ std::function &, double)> prunerFunc( // typecast so we can use this to get probability value DiscreteValues values(choices); - // Case where the Gaussian mixture has the same + // Case where the hybrid Gaussian conditional has the same // discrete keys as the decision tree. if (conditionalKeySet == discreteProbsKeySet) { if (prunedDiscreteProbs(values) == 0) { @@ -181,7 +181,7 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) { // Bayes Net and prune them as per prunedDiscreteProbs. for (auto &&conditional : *this) { if (auto gm = conditional->asMixture()) { - // Make a copy of the Gaussian mixture and prune it! + // Make a copy of the hybrid Gaussian conditional and prune it! 
auto prunedHybridGaussianConditional = std::make_shared(*gm); prunedHybridGaussianConditional->prune( diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 5c453c106..d5d50afd2 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -28,7 +28,8 @@ namespace gtsam { /** * A hybrid Bayes net is a collection of HybridConditionals, which can have - * discrete conditionals, Gaussian mixtures, or pure Gaussian conditionals. + * discrete conditionals, hybrid Gaussian conditionals, + * or pure Gaussian conditionals. * * @ingroup hybrid */ diff --git a/gtsam/hybrid/HybridBayesTree.cpp b/gtsam/hybrid/HybridBayesTree.cpp index da2645b5a..c26f226c8 100644 --- a/gtsam/hybrid/HybridBayesTree.cpp +++ b/gtsam/hybrid/HybridBayesTree.cpp @@ -205,9 +205,9 @@ void HybridBayesTree::prune(const size_t maxNrLeaves) { // If conditional is hybrid, we prune it. if (conditional->isHybrid()) { - auto gaussianMixture = conditional->asMixture(); + auto hybridGaussianCond = conditional->asMixture(); - gaussianMixture->prune(parentData.prunedDiscreteProbs); + hybridGaussianCond->prune(parentData.prunedDiscreteProbs); } return parentData; } diff --git a/gtsam/hybrid/HybridBayesTree.h b/gtsam/hybrid/HybridBayesTree.h index ab2d8a73d..0fe9ca6ea 100644 --- a/gtsam/hybrid/HybridBayesTree.h +++ b/gtsam/hybrid/HybridBayesTree.h @@ -99,8 +99,8 @@ class GTSAM_EXPORT HybridBayesTree : public BayesTree { /** * @brief Recursively optimize the BayesTree to produce a vector solution. * - * @param assignment The discrete values assignment to select the Gaussian - * mixtures. + * @param assignment The discrete values assignment to select + * the hybrid conditional. 
* @return VectorValues */ VectorValues optimize(const DiscreteValues& assignment) const; @@ -170,7 +170,7 @@ class BayesTreeOrphanWrapper : public HybridConditional { void print( const std::string& s = "", const KeyFormatter& formatter = DefaultKeyFormatter) const override { - clique->print(s + "stored clique", formatter); + clique->print(s + " stored clique ", formatter); } }; diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index ed8125c2b..7a4981500 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -50,11 +50,11 @@ HybridConditional::HybridConditional( /* ************************************************************************ */ HybridConditional::HybridConditional( - const std::shared_ptr &gaussianMixture) - : BaseFactor(gaussianMixture->continuousKeys(), - gaussianMixture->discreteKeys()), - BaseConditional(gaussianMixture->nrFrontals()) { - inner_ = gaussianMixture; + const std::shared_ptr &hybridGaussianCond) + : BaseFactor(hybridGaussianCond->continuousKeys(), + hybridGaussianCond->discreteKeys()), + BaseConditional(hybridGaussianCond->nrFrontals()) { + inner_ = hybridGaussianCond; } /* ************************************************************************ */ diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index fb51e95f6..0fc60451d 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -124,11 +124,11 @@ class GTSAM_EXPORT HybridConditional /** * @brief Construct a new Hybrid Conditional object * - * @param gaussianMixture Gaussian Mixture Conditional used to create the + * @param hybridGaussianCond Hybrid Gaussian Conditional used to create the * HybridConditional. 
*/ HybridConditional( - const std::shared_ptr& gaussianMixture); + const std::shared_ptr& hybridGaussianCond); /// @} /// @name Testable @@ -148,7 +148,7 @@ class GTSAM_EXPORT HybridConditional /** * @brief Return HybridConditional as a HybridGaussianConditional - * @return nullptr if not a mixture + * @return nullptr if not a conditional * @return HybridGaussianConditional::shared_ptr otherwise */ HybridGaussianConditional::shared_ptr asMixture() const { diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 4c2739c86..ad5f70eb0 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -249,20 +249,20 @@ std::function &, const GaussianConditional::shared_ptr &)> HybridGaussianConditional::prunerFunc(const DecisionTreeFactor &discreteProbs) { // Get the discrete keys as sets for the decision tree - // and the gaussian mixture. + // and the hybrid gaussian conditional. auto discreteProbsKeySet = DiscreteKeysAsSet(discreteProbs.discreteKeys()); - auto gaussianMixtureKeySet = DiscreteKeysAsSet(this->discreteKeys()); + auto hybridGaussianCondKeySet = DiscreteKeysAsSet(this->discreteKeys()); - auto pruner = [discreteProbs, discreteProbsKeySet, gaussianMixtureKeySet]( + auto pruner = [discreteProbs, discreteProbsKeySet, hybridGaussianCondKeySet]( const Assignment &choices, const GaussianConditional::shared_ptr &conditional) -> GaussianConditional::shared_ptr { // typecast so we can use this to get probability value const DiscreteValues values(choices); - // Case where the gaussian mixture has the same + // Case where the hybrid gaussian conditional has the same // discrete keys as the decision tree. 
- if (gaussianMixtureKeySet == discreteProbsKeySet) { + if (hybridGaussianCondKeySet == discreteProbsKeySet) { if (discreteProbs(values) == 0.0) { // empty aka null pointer std::shared_ptr null; @@ -274,7 +274,7 @@ HybridGaussianConditional::prunerFunc(const DecisionTreeFactor &discreteProbs) { std::vector set_diff; std::set_difference( discreteProbsKeySet.begin(), discreteProbsKeySet.end(), - gaussianMixtureKeySet.begin(), gaussianMixtureKeySet.end(), + hybridGaussianCondKeySet.begin(), hybridGaussianCondKeySet.end(), std::back_inserter(set_diff)); const std::vector assignments = diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 240160744..eb2bbb937 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -33,8 +33,8 @@ namespace gtsam { class HybridValues; /** - * @brief A conditional of gaussian mixtures indexed by discrete variables, as - * part of a Bayes Network. This is the result of the elimination of a + * @brief A conditional of gaussian conditionals indexed by discrete variables, + * as part of a Bayes Network. This is the result of the elimination of a * continuous variable in a hybrid scheme, such that the remaining variables are * discrete+continuous. * @@ -107,7 +107,7 @@ class GTSAM_EXPORT HybridGaussianConditional const Conditionals &conditionals); /** - * @brief Make a Gaussian Mixture from a vector of Gaussian conditionals. + * @brief Make a Hybrid Gaussian Conditional from a vector of Gaussian conditionals. * The DecisionTree-based constructor is preferred over this one. * * @param continuousFrontals The continuous frontal variables @@ -152,8 +152,8 @@ class GTSAM_EXPORT HybridGaussianConditional double logNormalizationConstant() const override { return logConstant_; } /** - * Create a likelihood factor for a Gaussian mixture, return a Mixture factor - * on the parents. 
+ * Create a likelihood factor for a hybrid Gaussian conditional, + * return a hybrid Gaussian factor on the parents. */ std::shared_ptr likelihood( const VectorValues &given) const; @@ -172,9 +172,9 @@ class GTSAM_EXPORT HybridGaussianConditional const VectorValues &continuousValues) const; /** - * @brief Compute the error of this Gaussian Mixture. + * @brief Compute the error of this hybrid Gaussian conditional. * - * This requires some care, as different mixture components may have + * This requires some care, as different components may have * different normalization constants. Let's consider p(x|y,m), where m is * discrete. We need the error to satisfy the invariant: * @@ -209,7 +209,7 @@ class GTSAM_EXPORT HybridGaussianConditional const VectorValues &continuousValues) const; /** - * @brief Compute the logProbability of this Gaussian Mixture. + * @brief Compute the logProbability of this hybrid Gaussian conditional. * * @param values Continuous values and discrete assignment. * @return double diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 5f9cd925e..8c73b7952 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -37,13 +37,13 @@ class VectorValues; using GaussianFactorValuePair = std::pair; /** - * @brief Implementation of a discrete conditional mixture factor. + * @brief Implementation of a discrete-conditioned hybrid factor. * Implements a joint discrete-continuous factor where the discrete variable - * serves to "select" a mixture component corresponding to a GaussianFactor type - * of measurement. + * serves to "select" a component corresponding to a GaussianFactor. * - * Represents the underlying Gaussian mixture as a Decision Tree, where the set - * of discrete variables indexes to the continuous gaussian distribution. 
+ * Represents the underlying hybrid Gaussian components as a Decision Tree, + * where the set of discrete variables indexes to + * the continuous gaussian distribution. * * @ingroup hybrid */ @@ -80,7 +80,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { HybridGaussianFactor() = default; /** - * @brief Construct a new Gaussian mixture factor. + * @brief Construct a new hybrid Gaussian factor. * * @param continuousKeys A vector of keys representing continuous variables. * @param discreteKeys A vector of keys representing discrete variables and diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 362150745..06d4fa9d7 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -437,8 +437,8 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, const Ordering &frontalKeys) { // NOTE: Because we are in the Conditional Gaussian regime there are only // a few cases: - // 1. continuous variable, make a Gaussian Mixture if there are hybrid - // factors; + // 1. continuous variable, make a hybrid Gaussian conditional if there are + // hybrid factors; // 2. continuous variable, we make a Gaussian Factor if there are no hybrid // factors; // 3. discrete variable, no continuous factor is allowed @@ -550,9 +550,10 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::errorTree( f = hc->inner(); } - if (auto gaussianMixture = dynamic_pointer_cast(f)) { + if (auto hybridGaussianCond = + dynamic_pointer_cast(f)) { // Compute factor error and add it. 
- error_tree = error_tree + gaussianMixture->errorTree(continuousValues); + error_tree = error_tree + hybridGaussianCond->errorTree(continuousValues); } else if (auto gaussian = dynamic_pointer_cast(f)) { // If continuous only, get the (double) error // and add it to the error_tree diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 911058437..d7aa1042f 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -210,7 +210,7 @@ class GTSAM_EXPORT HybridGaussianFactorGraph * @brief Create a decision tree of factor graphs out of this hybrid factor * graph. * - * For example, if there are two mixture factors, one with a discrete key A + * For example, if there are two hybrid factors, one with a discrete key A * and one with a discrete key B, then the decision tree will have two levels, * one for A and one for B. The leaves of the tree will be the Gaussian * factors that have only continuous keys. diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 1120fc948..2394beeb4 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -37,11 +37,10 @@ namespace gtsam { using NonlinearFactorValuePair = std::pair; /** - * @brief Implementation of a discrete conditional mixture factor. + * @brief Implementation of a discrete-conditioned hybrid factor. * * Implements a joint discrete-continuous factor where the discrete variable - * serves to "select" a mixture component corresponding to a NonlinearFactor - * type of measurement. + * serves to "select" a hybrid component corresponding to a NonlinearFactor. 
* * This class stores all factors as HybridFactors which can then be typecast to * one of (NonlinearFactor, GaussianFactor) which can then be checked to perform diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index 78370a157..6d19ef156 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -151,7 +151,7 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize( if (!f) { continue; } - // Check if it is a nonlinear mixture factor + // Check if it is a hybrid nonlinear factor if (auto mf = dynamic_pointer_cast(f)) { const HybridGaussianFactor::shared_ptr& gmf = mf->linearize(continuousValues); diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp index 546837bf9..1df978d65 100644 --- a/gtsam/hybrid/HybridSmoother.cpp +++ b/gtsam/hybrid/HybridSmoother.cpp @@ -100,7 +100,7 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph, // If hybridBayesNet is not empty, // it means we have conditionals to add to the factor graph. if (!hybridBayesNet.empty()) { - // We add all relevant conditional mixtures on the last continuous variable + // We add all relevant hybrid conditionals on the last continuous variable // in the previous `hybridBayesNet` to the graph // Conditionals to remove from the bayes net diff --git a/gtsam/hybrid/HybridSmoother.h b/gtsam/hybrid/HybridSmoother.h index fc40e0297..267746ab6 100644 --- a/gtsam/hybrid/HybridSmoother.h +++ b/gtsam/hybrid/HybridSmoother.h @@ -34,7 +34,7 @@ class GTSAM_EXPORT HybridSmoother { * Given new factors, perform an incremental update. * The relevant densities in the `hybridBayesNet` will be added to the input * graph (fragment), and then eliminated according to the `ordering` - * presented. The remaining factor graph contains Gaussian mixture factors + * presented. 
The remaining factor graph contains hybrid Gaussian factors * that are not connected to the variables in the ordering, or a single * discrete factor on all discrete keys, plus all discrete factors in the * original graph. @@ -68,7 +68,13 @@ class GTSAM_EXPORT HybridSmoother { const HybridGaussianFactorGraph& graph, const HybridBayesNet& hybridBayesNet, const Ordering& ordering) const; - /// Get the Gaussian Mixture from the Bayes Net posterior at `index`. + /** + * @brief Get the hybrid Gaussian conditional from + * the Bayes Net posterior at `index`. + * + * @param index Indexing value. + * @return HybridGaussianConditional::shared_ptr + */ HybridGaussianConditional::shared_ptr gaussianMixture(size_t index) const; /// Return the Bayes Net posterior. diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index b3bb14c8f..09905bf79 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -40,7 +40,7 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1, bool manyModes = false) { HybridBayesNet bayesNet; - // Create Gaussian mixture z_i = x0 + noise for each measurement. + // Create hybrid Gaussian factor z_i = x0 + noise for each measurement. for (size_t i = 0; i < num_measurements; i++) { const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode; std::vector conditionals{ diff --git a/gtsam/hybrid/tests/testHybridConditional.cpp b/gtsam/hybrid/tests/testHybridConditional.cpp index 78dbd314c..b9c62c89d 100644 --- a/gtsam/hybrid/tests/testHybridConditional.cpp +++ b/gtsam/hybrid/tests/testHybridConditional.cpp @@ -44,8 +44,8 @@ TEST(HybridConditional, Invariants) { CHECK(hc0->isHybrid()); // Check invariants as a HybridGaussianConditional. 
- const auto mixture = hc0->asMixture(); - EXPECT(HybridGaussianConditional::CheckInvariants(*mixture, values)); + const auto conditional = hc0->asMixture(); + EXPECT(HybridGaussianConditional::CheckInvariants(*conditional, values)); // Check invariants as a HybridConditional. EXPECT(HybridConditional::CheckInvariants(*hc0, values)); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index ee48ad5d8..6f28f639d 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -429,7 +429,7 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() { nfg.emplace_shared>(X(0), 0.0, noise_model); nfg.emplace_shared>(X(1), 1.0, noise_model); - // Add mixture factor: + // Add hybrid nonlinear factor: DiscreteKey m(M(0), 2); const auto zero_motion = std::make_shared>(X(0), X(1), 0, noise_model); diff --git a/gtsam/hybrid/tests/testHybridFactorGraph.cpp b/gtsam/hybrid/tests/testHybridFactorGraph.cpp index 8a2e3be3c..20719b739 100644 --- a/gtsam/hybrid/tests/testHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridFactorGraph.cpp @@ -50,7 +50,7 @@ TEST(HybridFactorGraph, Keys) { // Add factor between x0 and x1 hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - // Add a gaussian mixture factor ϕ(x1, c1) + // Add a hybrid Gaussian factor ϕ(x1, c1) DiscreteKey m1(M(1), 2); DecisionTree dt( M(1), {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index d01158413..406203db8 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -52,9 +52,8 @@ const std::vector conditionals{ commonSigma), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), commonSigma)}; -const HybridGaussianConditional mixture( - {Z(0)}, {X(0)}, {mode}, - 
HybridGaussianConditional::Conditionals({mode}, conditionals)); +const HybridGaussianConditional hybrid_conditional({Z(0)}, {X(0)}, mode, + conditionals); } // namespace equal_constants /* ************************************************************************* */ @@ -62,21 +61,21 @@ const HybridGaussianConditional mixture( TEST(HybridGaussianConditional, Invariants) { using namespace equal_constants; - // Check that the mixture normalization constant is the max of all constants - // which are all equal, in this case, hence: - const double K = mixture.logNormalizationConstant(); + // Check that the conditional normalization constant is the max of all + // constants which are all equal, in this case, hence: + const double K = hybrid_conditional.logNormalizationConstant(); EXPECT_DOUBLES_EQUAL(K, conditionals[0]->logNormalizationConstant(), 1e-8); EXPECT_DOUBLES_EQUAL(K, conditionals[1]->logNormalizationConstant(), 1e-8); - EXPECT(HybridGaussianConditional::CheckInvariants(mixture, hv0)); - EXPECT(HybridGaussianConditional::CheckInvariants(mixture, hv1)); + EXPECT(HybridGaussianConditional::CheckInvariants(hybrid_conditional, hv0)); + EXPECT(HybridGaussianConditional::CheckInvariants(hybrid_conditional, hv1)); } /* ************************************************************************* */ /// Check LogProbability. TEST(HybridGaussianConditional, LogProbability) { using namespace equal_constants; - auto actual = mixture.logProbability(vv); + auto actual = hybrid_conditional.logProbability(vv); // Check result. std::vector discrete_keys = {mode}; @@ -90,7 +89,7 @@ TEST(HybridGaussianConditional, LogProbability) { for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; EXPECT_DOUBLES_EQUAL(conditionals[mode]->logProbability(vv), - mixture.logProbability(hv), 1e-8); + hybrid_conditional.logProbability(hv), 1e-8); } } @@ -98,7 +97,7 @@ TEST(HybridGaussianConditional, LogProbability) { /// Check error. 
TEST(HybridGaussianConditional, Error) { using namespace equal_constants; - auto actual = mixture.errorTree(vv); + auto actual = hybrid_conditional.errorTree(vv); // Check result. std::vector discrete_keys = {mode}; @@ -111,8 +110,8 @@ TEST(HybridGaussianConditional, Error) { // Check for non-tree version. for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv), mixture.error(hv), - 1e-8); + EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv), + hybrid_conditional.error(hv), 1e-8); } } @@ -123,11 +122,14 @@ TEST(HybridGaussianConditional, Likelihood) { using namespace equal_constants; // Compute likelihood - auto likelihood = mixture.likelihood(vv); + auto likelihood = hybrid_conditional.likelihood(vv); - // Check that the mixture error and the likelihood error are the same. - EXPECT_DOUBLES_EQUAL(mixture.error(hv0), likelihood->error(hv0), 1e-8); - EXPECT_DOUBLES_EQUAL(mixture.error(hv1), likelihood->error(hv1), 1e-8); + // Check that the hybrid conditional error and the likelihood error are the + // same. + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv0), likelihood->error(hv0), + 1e-8); + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), likelihood->error(hv1), + 1e-8); // Check that likelihood error is as expected, i.e., just the errors of the // individual likelihoods, in the `equal_constants` case. 
@@ -141,7 +143,8 @@ TEST(HybridGaussianConditional, Likelihood) { std::vector ratio(2); for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - ratio[mode] = std::exp(-likelihood->error(hv)) / mixture.evaluate(hv); + ratio[mode] = + std::exp(-likelihood->error(hv)) / hybrid_conditional.evaluate(hv); } EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8); } @@ -155,16 +158,15 @@ const std::vector conditionals{ 0.5), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 3.0)}; -const HybridGaussianConditional mixture( - {Z(0)}, {X(0)}, {mode}, - HybridGaussianConditional::Conditionals({mode}, conditionals)); +const HybridGaussianConditional hybrid_conditional({Z(0)}, {X(0)}, mode, + conditionals); } // namespace mode_dependent_constants /* ************************************************************************* */ // Create a test for continuousParents. TEST(HybridGaussianConditional, ContinuousParents) { using namespace mode_dependent_constants; - const KeyVector continuousParentKeys = mixture.continuousParents(); + const KeyVector continuousParentKeys = hybrid_conditional.continuousParents(); // Check that the continuous parent keys are correct: EXPECT(continuousParentKeys.size() == 1); EXPECT(continuousParentKeys[0] == X(0)); @@ -177,12 +179,14 @@ TEST(HybridGaussianConditional, Likelihood2) { using namespace mode_dependent_constants; // Compute likelihood - auto likelihood = mixture.likelihood(vv); + auto likelihood = hybrid_conditional.likelihood(vv); - // Check that the mixture error and the likelihood error are as expected, - // this invariant is the same as the equal noise case: - EXPECT_DOUBLES_EQUAL(mixture.error(hv0), likelihood->error(hv0), 1e-8); - EXPECT_DOUBLES_EQUAL(mixture.error(hv1), likelihood->error(hv1), 1e-8); + // Check that the hybrid conditional error and the likelihood error are as + // expected, this invariant is the same as the equal noise case: + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv0), 
likelihood->error(hv0), + 1e-8); + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), likelihood->error(hv1), + 1e-8); // Check the detailed JacobianFactor calculation for mode==1. { @@ -195,7 +199,7 @@ TEST(HybridGaussianConditional, Likelihood2) { CHECK(jf1->rows() == 2); // Check that the constant C1 is properly encoded in the JacobianFactor. - const double C1 = mixture.logNormalizationConstant() - + const double C1 = hybrid_conditional.logNormalizationConstant() - conditionals[1]->logNormalizationConstant(); const double c1 = std::sqrt(2.0 * C1); Vector expected_unwhitened(2); @@ -209,15 +213,16 @@ TEST(HybridGaussianConditional, Likelihood2) { Vector actual_whitened = jf1->error_vector(vv); EXPECT(assert_equal(expected_whitened, actual_whitened)); - // Check that the error is equal to the mixture error: - EXPECT_DOUBLES_EQUAL(mixture.error(hv1), jf1->error(hv1), 1e-8); + // Check that the error is equal to the conditional error: + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), jf1->error(hv1), 1e-8); } // Check that the ratio of probPrime to evaluate is the same for all modes. std::vector ratio(2); for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - ratio[mode] = std::exp(-likelihood->error(hv)) / mixture.evaluate(hv); + ratio[mode] = + std::exp(-likelihood->error(hv)) / hybrid_conditional.evaluate(hv); } EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8); } diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 850a1e4d1..9d7a09806 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -45,7 +45,7 @@ using symbol_shorthand::X; using symbol_shorthand::Z; /* ************************************************************************* */ -// Check iterators of empty mixture. +// Check iterators of empty hybrid factor. 
TEST(HybridGaussianFactor, Constructor) { HybridGaussianFactor factor; HybridGaussianFactor::const_iterator const_it = factor.begin(); @@ -55,7 +55,7 @@ TEST(HybridGaussianFactor, Constructor) { } /* ************************************************************************* */ -// "Add" two mixture factors together. +// "Add" two hybrid factors together. TEST(HybridGaussianFactor, Sum) { DiscreteKey m1(1, 2), m2(2, 3); @@ -78,20 +78,20 @@ TEST(HybridGaussianFactor, Sum) { // TODO(Frank): why specify keys at all? And: keys in factor should be *all* // keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey? // Design review! - HybridGaussianFactor mixtureFactorA({X(1), X(2)}, {m1}, factorsA); - HybridGaussianFactor mixtureFactorB({X(1), X(3)}, {m2}, factorsB); + HybridGaussianFactor hybridFactorA({X(1), X(2)}, {m1}, factorsA); + HybridGaussianFactor hybridFactorB({X(1), X(3)}, {m2}, factorsB); // Check that number of keys is 3 - EXPECT_LONGS_EQUAL(3, mixtureFactorA.keys().size()); + EXPECT_LONGS_EQUAL(3, hybridFactorA.keys().size()); // Check that number of discrete keys is 1 - EXPECT_LONGS_EQUAL(1, mixtureFactorA.discreteKeys().size()); + EXPECT_LONGS_EQUAL(1, hybridFactorA.discreteKeys().size()); - // Create sum of two mixture factors: it will be a decision tree now on both + // Create sum of two hybrid factors: it will be a decision tree now on both // discrete variables m1 and m2: GaussianFactorGraphTree sum; - sum += mixtureFactorA; - sum += mixtureFactorB; + sum += hybridFactorA; + sum += hybridFactorB; // Let's check that this worked: Assignment mode; @@ -112,7 +112,7 @@ TEST(HybridGaussianFactor, Printing) { auto f11 = std::make_shared(X(1), A1, X(2), A2, b); std::vector factors{{f10, 0.0}, {f11, 0.0}}; - HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + HybridGaussianFactor hybridFactor({X(1), X(2)}, {m1}, factors); std::string expected = R"(HybridGaussianFactor @@ -144,7 +144,7 @@ Hybrid [x1 x2; 1]{ } )"; - 
EXPECT(assert_print_equal(expected, mixtureFactor)); + EXPECT(assert_print_equal(expected, hybridFactor)); } /* ************************************************************************* */ @@ -181,7 +181,7 @@ TEST(HybridGaussianFactor, Error) { auto f1 = std::make_shared(X(1), A11, X(2), A12, b); std::vector factors{{f0, 0.0}, {f1, 0.0}}; - HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + HybridGaussianFactor hybridFactor({X(1), X(2)}, {m1}, factors); VectorValues continuousValues; continuousValues.insert(X(1), Vector2(0, 0)); @@ -189,7 +189,7 @@ TEST(HybridGaussianFactor, Error) { // error should return a tree of errors, with nodes for each discrete value. AlgebraicDecisionTree error_tree = - mixtureFactor.errorTree(continuousValues); + hybridFactor.errorTree(continuousValues); std::vector discrete_keys = {m1}; // Error values for regression test @@ -202,7 +202,7 @@ TEST(HybridGaussianFactor, Error) { DiscreteValues discreteValues; discreteValues[m1.first] = 1; EXPECT_DOUBLES_EQUAL( - 4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9); + 4.0, hybridFactor.error({continuousValues, discreteValues}), 1e-9); } namespace test_gmm { diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index dc88c8b7e..1530069aa 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -69,8 +69,8 @@ TEST(HybridGaussianFactorGraph, Creation) { hfg.emplace_shared(X(0), I_3x3, Z_3x1); - // Define a gaussian mixture conditional P(x0|x1, c0) and add it to the factor - // graph + // Define a hybrid gaussian conditional P(x0|x1, c0) + // and add it to the factor graph. 
HybridGaussianConditional gm( {X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}), HybridGaussianConditional::Conditionals( @@ -125,7 +125,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { // Add factor between x0 and x1 hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - // Add a gaussian mixture factor ϕ(x1, c1) + // Add a hybrid gaussian factor ϕ(x1, c1) DiscreteKey m1(M(1), 2); DecisionTree dt( M(1), {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, @@ -720,9 +720,9 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) { // Create expected decision tree with two factor graphs: - // Get mixture factor: - auto mixture = fg.at(0); - CHECK(mixture); + // Get hybrid factor: + auto hybrid = fg.at(0); + CHECK(hybrid); // Get prior factor: const auto gf = fg.at(1); @@ -737,8 +737,8 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) { // Expected decision tree with two factor graphs: // f(x0;mode=0)P(x0) and f(x0;mode=1)P(x0) GaussianFactorGraphTree expected{ - M(0), GaussianFactorGraph(std::vector{(*mixture)(d0), prior}), - GaussianFactorGraph(std::vector{(*mixture)(d1), prior})}; + M(0), GaussianFactorGraph(std::vector{(*hybrid)(d0), prior}), + GaussianFactorGraph(std::vector{(*hybrid)(d1), prior})}; EXPECT(assert_equal(expected(d0), actual(d0), 1e-5)); EXPECT(assert_equal(expected(d1), actual(d1), 1e-5)); @@ -802,7 +802,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { // Create expected Bayes Net: HybridBayesNet expectedBayesNet; - // Create Gaussian mixture on X(0). + // Create hybrid Gaussian factor on X(0). using tiny::mode; // regression, but mean checked to be 5.0 in both cases: const auto conditional0 = std::make_shared( @@ -835,7 +835,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { const DiscreteKey mode{M(0), 2}; HybridBayesNet bn; - // Create Gaussian mixture z_0 = x0 + noise for each measurement. + // Create hybrid Gaussian factor z_0 = x0 + noise for each measurement. 
std::vector conditionals{ GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 0.5)}; @@ -863,7 +863,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { // Create expected Bayes Net: HybridBayesNet expectedBayesNet; - // Create Gaussian mixture on X(0). + // Create hybrid Gaussian factor on X(0). // regression, but mean checked to be 5.0 in both cases: const auto conditional0 = std::make_shared( X(0), Vector1(10.1379), I_1x1 * 2.02759), @@ -900,7 +900,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) { // Create expected Bayes Net: HybridBayesNet expectedBayesNet; - // Create Gaussian mixture on X(0). + // Create hybrid Gaussian factor on X(0). using tiny::mode; // regression, but mean checked to be 5.0 in both cases: const auto conditional0 = std::make_shared( @@ -953,7 +953,7 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { // Add measurements: for (size_t t : {0, 1, 2}) { - // Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t): + // Create hybrid Gaussian factor on Z(t) conditioned on X(t) and mode N(t): const auto noise_mode_t = DiscreteKey{N(t), 2}; std::vector conditionals{ GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1, 0.5), @@ -970,7 +970,8 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { // Add motion models: for (size_t t : {2, 1}) { - // Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1): + // Create hybrid Gaussian factor on X(t) conditioned on X(t-1) + // and mode M(t-1): const auto motion_model_t = DiscreteKey{M(t), 2}; std::vector conditionals{ GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), Z_1x1, diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index e2a10a783..ba408fbbc 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -422,9 +422,8 
@@ TEST(HybridGaussianISAM, NonTrivial) { noise_model); std::vector> components = { {moving, 0.0}, {still, 0.0}}; - auto mixtureFactor = std::make_shared( + fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(1), 2), components); - fg.push_back(mixtureFactor); // Add equivalent of ImuFactor fg.emplace_shared>(X(0), X(1), Pose2(1.0, 0.0, 0), @@ -462,9 +461,8 @@ TEST(HybridGaussianISAM, NonTrivial) { moving = std::make_shared(W(1), W(2), odometry, noise_model); components = {{moving, 0.0}, {still, 0.0}}; - mixtureFactor = std::make_shared( + fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(2), 2), components); - fg.push_back(mixtureFactor); // Add equivalent of ImuFactor fg.emplace_shared>(X(1), X(2), Pose2(1.0, 0.0, 0), @@ -505,9 +503,8 @@ TEST(HybridGaussianISAM, NonTrivial) { moving = std::make_shared(W(2), W(3), odometry, noise_model); components = {{moving, 0.0}, {still, 0.0}}; - mixtureFactor = std::make_shared( + fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(3), 2), components); - fg.push_back(mixtureFactor); // Add equivalent of ImuFactor fg.emplace_shared>(X(2), X(3), Pose2(1.0, 0.0, 0), diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp index a4c1b4cc7..92efa680a 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp @@ -35,7 +35,7 @@ using symbol_shorthand::M; using symbol_shorthand::X; /* ************************************************************************* */ -// Check iterators of empty mixture. +// Check iterators of empty hybrid factor. 
TEST(HybridNonlinearFactor, Constructor) { HybridNonlinearFactor factor; HybridNonlinearFactor::const_iterator const_it = factor.begin(); @@ -60,7 +60,7 @@ TEST(HybridNonlinearFactor, Printing) { std::make_shared>(X(1), X(2), between1, model); std::vector factors{{f0, 0.0}, {f1, 0.0}}; - HybridNonlinearFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + HybridNonlinearFactor hybridFactor({X(1), X(2)}, {m1}, factors); std::string expected = R"(Hybrid [x1 x2; 1] @@ -69,7 +69,7 @@ HybridNonlinearFactor 0 Leaf Nonlinear factor on 2 keys 1 Leaf Nonlinear factor on 2 keys )"; - EXPECT(assert_print_equal(expected, mixtureFactor)); + EXPECT(assert_print_equal(expected, hybridFactor)); } /* ************************************************************************* */ @@ -94,14 +94,14 @@ static HybridNonlinearFactor getHybridNonlinearFactor() { /* ************************************************************************* */ // Test the error of the HybridNonlinearFactor TEST(HybridNonlinearFactor, Error) { - auto mixtureFactor = getHybridNonlinearFactor(); + auto hybridFactor = getHybridNonlinearFactor(); Values continuousValues; continuousValues.insert(X(1), 0); continuousValues.insert(X(2), 1); AlgebraicDecisionTree error_tree = - mixtureFactor.errorTree(continuousValues); + hybridFactor.errorTree(continuousValues); DiscreteKey m1(1, 2); std::vector discrete_keys = {m1}; @@ -114,8 +114,8 @@ TEST(HybridNonlinearFactor, Error) { /* ************************************************************************* */ // Test dim of the HybridNonlinearFactor TEST(HybridNonlinearFactor, Dim) { - auto mixtureFactor = getHybridNonlinearFactor(); - EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); + auto hybridFactor = getHybridNonlinearFactor(); + EXPECT_LONGS_EQUAL(1, hybridFactor.dim()); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp 
index 3631ac44e..36821a782 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -281,7 +281,7 @@ TEST(HybridFactorGraph, EliminationTree) { TEST(GaussianElimination, Eliminate_x0) { Switching self(3); - // Gather factors on x1, has a simple Gaussian and a mixture factor. + // Gather factors on x1, has a simple Gaussian and a hybrid factor. HybridGaussianFactorGraph factors; // Add gaussian prior factors.push_back(self.linearizedFactorGraph[0]); @@ -306,7 +306,7 @@ TEST(GaussianElimination, Eliminate_x0) { TEST(HybridsGaussianElimination, Eliminate_x1) { Switching self(3); - // Gather factors on x1, will be two mixture factors (with x0 and x2, resp.). + // Gather factors on x1, will be two hybrid factors (with x0 and x2, resp.). HybridGaussianFactorGraph factors; factors.push_back(self.linearizedFactorGraph[1]); // involves m0 factors.push_back(self.linearizedFactorGraph[2]); // involves m1 @@ -349,18 +349,18 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) { // Eliminate x0 const Ordering ordering{X(0), X(1)}; - const auto [hybridConditionalMixture, factorOnModes] = + const auto [hybridConditional, factorOnModes] = EliminateHybrid(factors, ordering); - auto gaussianConditionalMixture = + auto hybridGaussianConditional = dynamic_pointer_cast( - hybridConditionalMixture->inner()); + hybridConditional->inner()); - CHECK(gaussianConditionalMixture); + CHECK(hybridGaussianConditional); // Frontals = [x0, x1] - EXPECT_LONGS_EQUAL(2, gaussianConditionalMixture->nrFrontals()); + EXPECT_LONGS_EQUAL(2, hybridGaussianConditional->nrFrontals()); // 1 parent, which is the mode - EXPECT_LONGS_EQUAL(1, gaussianConditionalMixture->nrParents()); + EXPECT_LONGS_EQUAL(1, hybridGaussianConditional->nrParents()); // This is now a discreteFactor auto discreteFactor = dynamic_pointer_cast(factorOnModes); @@ -849,8 +849,8 @@ namespace test_relinearization { * This way we can directly provide the 
likelihoods and * then perform (re-)linearization. * - * @param means The means of the GaussianMixtureFactor components. - * @param sigmas The covariances of the GaussianMixtureFactor components. + * @param means The means of the HybridGaussianFactor components. + * @param sigmas The covariances of the HybridGaussianFactor components. * @param m1 The discrete key. * @param x0_measurement A measurement on X0 * @return HybridGaussianFactorGraph diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index 6932508df..e37c6f170 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -441,9 +441,8 @@ TEST(HybridNonlinearISAM, NonTrivial) { noise_model); std::vector> components = { {moving, 0.0}, {still, 0.0}}; - auto mixtureFactor = std::make_shared( + fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(1), 2), components); - fg.push_back(mixtureFactor); // Add equivalent of ImuFactor fg.emplace_shared>(X(0), X(1), Pose2(1.0, 0.0, 0), @@ -481,9 +480,8 @@ TEST(HybridNonlinearISAM, NonTrivial) { moving = std::make_shared(W(1), W(2), odometry, noise_model); components = {{moving, 0.0}, {still, 0.0}}; - mixtureFactor = std::make_shared( + fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(2), 2), components); - fg.push_back(mixtureFactor); // Add equivalent of ImuFactor fg.emplace_shared>(X(1), X(2), Pose2(1.0, 0.0, 0), @@ -524,9 +522,8 @@ TEST(HybridNonlinearISAM, NonTrivial) { moving = std::make_shared(W(2), W(3), odometry, noise_model); components = {{moving, 0.0}, {still, 0.0}}; - mixtureFactor = std::make_shared( + fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(3), 2), components); - fg.push_back(mixtureFactor); // Add equivalent of ImuFactor fg.emplace_shared>(X(2), X(3), Pose2(1.0, 0.0, 0), diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index 5eafc05ae..4ec1aec1e 100644 --- 
a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -46,9 +46,9 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): self.assertEqual(hbn.size(), 2) - mixture = hbn.at(0).inner() - self.assertIsInstance(mixture, HybridGaussianConditional) - self.assertEqual(len(mixture.keys()), 2) + hybridCond = hbn.at(0).inner() + self.assertIsInstance(hybridCond, HybridGaussianConditional) + self.assertEqual(len(hybridCond.keys()), 2) discrete_conditional = hbn.at(hbn.size() - 1).inner() self.assertIsInstance(discrete_conditional, DiscreteConditional) @@ -90,7 +90,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): # Create mode key: 0 is low-noise, 1 is high-noise. mode = (M(0), 2) - # Create Gaussian mixture Z(0) = X(0) + noise for each measurement. + # Create hybrid Gaussian conditional Z(0) = X(0) + noise for each measurement. I_1x1 = np.eye(1) for i in range(num_measurements): conditional0 = GaussianConditional.FromMeanAndStddev(Z(i), From 6929d62300cdb1ad1b328deb7f1cdbda477b666e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 16 Sep 2024 14:42:35 -0400 Subject: [PATCH 253/398] rename asMixture to asHybrid --- gtsam/hybrid/HybridBayesNet.cpp | 10 +++++----- gtsam/hybrid/HybridBayesTree.cpp | 4 ++-- gtsam/hybrid/HybridConditional.cpp | 10 +++++----- gtsam/hybrid/HybridConditional.h | 2 +- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 4 ++-- gtsam/hybrid/HybridSmoother.cpp | 2 +- gtsam/hybrid/hybrid.i | 2 +- gtsam/hybrid/tests/testHybridBayesNet.cpp | 14 +++++++------- gtsam/hybrid/tests/testHybridConditional.cpp | 2 +- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 14 +++++++------- gtsam/hybrid/tests/testHybridNonlinearISAM.cpp | 14 +++++++------- 11 files changed, 39 insertions(+), 39 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index b24606640..6e96afb25 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -180,7 +180,7 @@ 
HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) { // Go through all the conditionals in the // Bayes Net and prune them as per prunedDiscreteProbs. for (auto &&conditional : *this) { - if (auto gm = conditional->asMixture()) { + if (auto gm = conditional->asHybrid()) { // Make a copy of the hybrid Gaussian conditional and prune it! auto prunedHybridGaussianConditional = std::make_shared(*gm); @@ -204,7 +204,7 @@ GaussianBayesNet HybridBayesNet::choose( const DiscreteValues &assignment) const { GaussianBayesNet gbn; for (auto &&conditional : *this) { - if (auto gm = conditional->asMixture()) { + if (auto gm = conditional->asHybrid()) { // If conditional is hybrid, select based on assignment. gbn.push_back((*gm)(assignment)); } else if (auto gc = conditional->asGaussian()) { @@ -291,7 +291,7 @@ AlgebraicDecisionTree HybridBayesNet::errorTree( // Iterate over each conditional. for (auto &&conditional : *this) { - if (auto gm = conditional->asMixture()) { + if (auto gm = conditional->asHybrid()) { // If conditional is hybrid, compute error for all assignments. result = result + gm->errorTree(continuousValues); @@ -321,7 +321,7 @@ AlgebraicDecisionTree HybridBayesNet::logProbability( // Iterate over each conditional. for (auto &&conditional : *this) { - if (auto gm = conditional->asMixture()) { + if (auto gm = conditional->asHybrid()) { // If conditional is hybrid, select based on assignment and compute // logProbability. 
result = result + gm->logProbability(continuousValues); @@ -369,7 +369,7 @@ HybridGaussianFactorGraph HybridBayesNet::toFactorGraph( if (conditional->frontalsIn(measurements)) { if (auto gc = conditional->asGaussian()) { fg.push_back(gc->likelihood(measurements)); - } else if (auto gm = conditional->asMixture()) { + } else if (auto gm = conditional->asHybrid()) { fg.push_back(gm->likelihood(measurements)); } else { throw std::runtime_error("Unknown conditional type"); diff --git a/gtsam/hybrid/HybridBayesTree.cpp b/gtsam/hybrid/HybridBayesTree.cpp index c26f226c8..9aee6dcf8 100644 --- a/gtsam/hybrid/HybridBayesTree.cpp +++ b/gtsam/hybrid/HybridBayesTree.cpp @@ -109,7 +109,7 @@ struct HybridAssignmentData { GaussianConditional::shared_ptr conditional; if (hybrid_conditional->isHybrid()) { - conditional = (*hybrid_conditional->asMixture())(parentData.assignment_); + conditional = (*hybrid_conditional->asHybrid())(parentData.assignment_); } else if (hybrid_conditional->isContinuous()) { conditional = hybrid_conditional->asGaussian(); } else { @@ -205,7 +205,7 @@ void HybridBayesTree::prune(const size_t maxNrLeaves) { // If conditional is hybrid, we prune it. 
if (conditional->isHybrid()) { - auto hybridGaussianCond = conditional->asMixture(); + auto hybridGaussianCond = conditional->asHybrid(); hybridGaussianCond->prune(parentData.prunedDiscreteProbs); } diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index 7a4981500..074534b8d 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -97,8 +97,8 @@ void HybridConditional::print(const std::string &s, bool HybridConditional::equals(const HybridFactor &other, double tol) const { const This *e = dynamic_cast(&other); if (e == nullptr) return false; - if (auto gm = asMixture()) { - auto other = e->asMixture(); + if (auto gm = asHybrid()) { + auto other = e->asHybrid(); return other != nullptr && gm->equals(*other, tol); } if (auto gc = asGaussian()) { @@ -119,7 +119,7 @@ double HybridConditional::error(const HybridValues &values) const { if (auto gc = asGaussian()) { return gc->error(values.continuous()); } - if (auto gm = asMixture()) { + if (auto gm = asHybrid()) { return gm->error(values); } if (auto dc = asDiscrete()) { @@ -134,7 +134,7 @@ double HybridConditional::logProbability(const HybridValues &values) const { if (auto gc = asGaussian()) { return gc->logProbability(values.continuous()); } - if (auto gm = asMixture()) { + if (auto gm = asHybrid()) { return gm->logProbability(values); } if (auto dc = asDiscrete()) { @@ -149,7 +149,7 @@ double HybridConditional::logNormalizationConstant() const { if (auto gc = asGaussian()) { return gc->logNormalizationConstant(); } - if (auto gm = asMixture()) { + if (auto gm = asHybrid()) { return gm->logNormalizationConstant(); // 0.0! 
} if (auto dc = asDiscrete()) { diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index 0fc60451d..f44ee2bf9 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -151,7 +151,7 @@ class GTSAM_EXPORT HybridConditional * @return nullptr if not a conditional * @return HybridGaussianConditional::shared_ptr otherwise */ - HybridGaussianConditional::shared_ptr asMixture() const { + HybridGaussianConditional::shared_ptr asHybrid() const { return std::dynamic_pointer_cast(inner_); } diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 06d4fa9d7..28a0c446f 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -115,7 +115,7 @@ void HybridGaussianFactorGraph::printErrors( } else { // Is hybrid auto conditionalComponent = - hc->asMixture()->operator()(values.discrete()); + hc->asHybrid()->operator()(values.discrete()); conditionalComponent->print(ss.str(), keyFormatter); std::cout << "error = " << conditionalComponent->error(values) << "\n"; @@ -184,7 +184,7 @@ GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const { } else if (auto gm = dynamic_pointer_cast(f)) { result = gm->add(result); } else if (auto hc = dynamic_pointer_cast(f)) { - if (auto gm = hc->asMixture()) { + if (auto gm = hc->asHybrid()) { result = gm->add(result); } else if (auto g = hc->asGaussian()) { result = addGaussian(result, g); diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp index 1df978d65..b898c0520 100644 --- a/gtsam/hybrid/HybridSmoother.cpp +++ b/gtsam/hybrid/HybridSmoother.cpp @@ -140,7 +140,7 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph, /* ************************************************************************* */ HybridGaussianConditional::shared_ptr HybridSmoother::gaussianMixture( size_t index) const { - return 
hybridBayesNet_.at(index)->asMixture(); + return hybridBayesNet_.at(index)->asHybrid(); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 37bcefe81..500e618d7 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -65,7 +65,7 @@ virtual class HybridConditional { double logProbability(const gtsam::HybridValues& values) const; double evaluate(const gtsam::HybridValues& values) const; double operator()(const gtsam::HybridValues& values) const; - gtsam::HybridGaussianConditional* asMixture() const; + gtsam::HybridGaussianConditional* asHybrid() const; gtsam::GaussianConditional* asGaussian() const; gtsam::DiscreteConditional* asDiscrete() const; gtsam::Factor* inner(); diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index bbe2d3df5..42ccf1600 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -144,13 +144,13 @@ TEST(HybridBayesNet, Choose) { EXPECT_LONGS_EQUAL(4, gbn.size()); - EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asMixture())(assignment), + EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asHybrid())(assignment), *gbn.at(0))); - EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asMixture())(assignment), + EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asHybrid())(assignment), *gbn.at(1))); - EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asMixture())(assignment), + EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asHybrid())(assignment), *gbn.at(2))); - EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asMixture())(assignment), + EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asHybrid())(assignment), *gbn.at(3))); } @@ -280,9 +280,9 @@ TEST(HybridBayesNet, Pruning) { const DiscreteValues discrete_values{{M(0), 1}, {M(1), 1}}; const HybridValues hybridValues{delta.continuous(), discrete_values}; double logProbability = 0; - logProbability += 
posterior->at(0)->asMixture()->logProbability(hybridValues); - logProbability += posterior->at(1)->asMixture()->logProbability(hybridValues); - logProbability += posterior->at(2)->asMixture()->logProbability(hybridValues); + logProbability += posterior->at(0)->asHybrid()->logProbability(hybridValues); + logProbability += posterior->at(1)->asHybrid()->logProbability(hybridValues); + logProbability += posterior->at(2)->asHybrid()->logProbability(hybridValues); // NOTE(dellaert): the discrete errors were not added in logProbability tree! logProbability += posterior->at(3)->asDiscrete()->logProbability(hybridValues); diff --git a/gtsam/hybrid/tests/testHybridConditional.cpp b/gtsam/hybrid/tests/testHybridConditional.cpp index b9c62c89d..365dde3bc 100644 --- a/gtsam/hybrid/tests/testHybridConditional.cpp +++ b/gtsam/hybrid/tests/testHybridConditional.cpp @@ -44,7 +44,7 @@ TEST(HybridConditional, Invariants) { CHECK(hc0->isHybrid()); // Check invariants as a HybridGaussianConditional. - const auto conditional = hc0->asMixture(); + const auto conditional = hc0->asHybrid(); EXPECT(HybridGaussianConditional::CheckInvariants(*conditional, values)); // Check invariants as a HybridConditional. diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index ba408fbbc..72ab43a2a 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -333,13 +333,13 @@ TEST(HybridGaussianElimination, Incremental_approximate) { // each with 2, 4, 8, and 5 (pruned) leaves respetively. 
EXPECT_LONGS_EQUAL(4, incrementalHybrid.size()); EXPECT_LONGS_EQUAL( - 2, incrementalHybrid[X(0)]->conditional()->asMixture()->nrComponents()); + 2, incrementalHybrid[X(0)]->conditional()->asHybrid()->nrComponents()); EXPECT_LONGS_EQUAL( - 3, incrementalHybrid[X(1)]->conditional()->asMixture()->nrComponents()); + 3, incrementalHybrid[X(1)]->conditional()->asHybrid()->nrComponents()); EXPECT_LONGS_EQUAL( - 5, incrementalHybrid[X(2)]->conditional()->asMixture()->nrComponents()); + 5, incrementalHybrid[X(2)]->conditional()->asHybrid()->nrComponents()); EXPECT_LONGS_EQUAL( - 5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents()); + 5, incrementalHybrid[X(3)]->conditional()->asHybrid()->nrComponents()); /***** Run Round 2 *****/ HybridGaussianFactorGraph graph2; @@ -354,9 +354,9 @@ TEST(HybridGaussianElimination, Incremental_approximate) { // with 5 (pruned) leaves. CHECK_EQUAL(5, incrementalHybrid.size()); EXPECT_LONGS_EQUAL( - 5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents()); + 5, incrementalHybrid[X(3)]->conditional()->asHybrid()->nrComponents()); EXPECT_LONGS_EQUAL( - 5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents()); + 5, incrementalHybrid[X(4)]->conditional()->asHybrid()->nrComponents()); } /* ************************************************************************/ @@ -548,7 +548,7 @@ TEST(HybridGaussianISAM, NonTrivial) { // Test if pruning worked correctly by checking that we only have 3 leaves in // the last node. 
- auto lastConditional = inc[X(3)]->conditional()->asMixture(); + auto lastConditional = inc[X(3)]->conditional()->asHybrid(); EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents()); } diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index e37c6f170..86e045bb0 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -358,13 +358,13 @@ TEST(HybridNonlinearISAM, Incremental_approximate) { // each with 2, 4, 8, and 5 (pruned) leaves respetively. EXPECT_LONGS_EQUAL(4, bayesTree.size()); EXPECT_LONGS_EQUAL( - 2, bayesTree[X(0)]->conditional()->asMixture()->nrComponents()); + 2, bayesTree[X(0)]->conditional()->asHybrid()->nrComponents()); EXPECT_LONGS_EQUAL( - 3, bayesTree[X(1)]->conditional()->asMixture()->nrComponents()); + 3, bayesTree[X(1)]->conditional()->asHybrid()->nrComponents()); EXPECT_LONGS_EQUAL( - 5, bayesTree[X(2)]->conditional()->asMixture()->nrComponents()); + 5, bayesTree[X(2)]->conditional()->asHybrid()->nrComponents()); EXPECT_LONGS_EQUAL( - 5, bayesTree[X(3)]->conditional()->asMixture()->nrComponents()); + 5, bayesTree[X(3)]->conditional()->asHybrid()->nrComponents()); /***** Run Round 2 *****/ HybridGaussianFactorGraph graph2; @@ -382,9 +382,9 @@ TEST(HybridNonlinearISAM, Incremental_approximate) { // with 5 (pruned) leaves. CHECK_EQUAL(5, bayesTree.size()); EXPECT_LONGS_EQUAL( - 5, bayesTree[X(3)]->conditional()->asMixture()->nrComponents()); + 5, bayesTree[X(3)]->conditional()->asHybrid()->nrComponents()); EXPECT_LONGS_EQUAL( - 5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents()); + 5, bayesTree[X(4)]->conditional()->asHybrid()->nrComponents()); } /* ************************************************************************/ @@ -569,7 +569,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { // Test if pruning worked correctly by checking that // we only have 3 leaves in the last node. 
- auto lastConditional = bayesTree[X(3)]->conditional()->asMixture(); + auto lastConditional = bayesTree[X(3)]->conditional()->asHybrid(); EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents()); } From 6c9f7ec7c7941d303a805fc2545dbc3ce9f810f3 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 18 Sep 2024 17:50:36 -0400 Subject: [PATCH 254/398] remove normlization stuff from HybridNonlinearFactor --- gtsam/hybrid/HybridNonlinearFactor.h | 59 +++------------------------- gtsam/hybrid/hybrid.i | 3 -- 2 files changed, 6 insertions(+), 56 deletions(-) diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 742b4c162..a7f607def 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -63,7 +63,6 @@ class HybridNonlinearFactor : public HybridFactor { private: /// Decision tree of Gaussian factors indexed by discrete keys. Factors factors_; - bool normalized_; public: HybridNonlinearFactor() = default; @@ -74,12 +73,10 @@ class HybridNonlinearFactor : public HybridFactor { * @param keys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Decision tree with of shared factors. - * @param normalized Flag indicating if the factor error is already - * normalized. */ HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, - const Factors& factors, bool normalized = false) - : Base(keys, discreteKeys), factors_(factors), normalized_(normalized) {} + const Factors& factors) + : Base(keys, discreteKeys), factors_(factors) {} /** * @brief Convenience constructor that generates the underlying factor @@ -94,15 +91,12 @@ class HybridNonlinearFactor : public HybridFactor { * @param keys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of nonlinear factor and scalar pairs. - * @param normalized Flag indicating if the factor error is already - * normalized. 
*/ template HybridNonlinearFactor( const KeyVector& keys, const DiscreteKeys& discreteKeys, - const std::vector, double>>& factors, - bool normalized = false) - : Base(keys, discreteKeys), normalized_(normalized) { + const std::vector, double>>& factors) + : Base(keys, discreteKeys) { std::vector nonlinear_factors; KeySet continuous_keys_set(keys.begin(), keys.end()); KeySet factor_keys_set; @@ -224,11 +218,10 @@ class HybridNonlinearFactor : public HybridFactor { }; if (!factors_.equals(f.factors_, compare)) return false; - // If everything above passes, and the keys_, discreteKeys_ and normalized_ + // If everything above passes, and the keys_ and discreteKeys_ // member variables are identical, return true. return (std::equal(keys_.begin(), keys_.end(), f.keys().begin()) && - (discreteKeys_ == f.discreteKeys_) && - (normalized_ == f.normalized_)); + (discreteKeys_ == f.discreteKeys_)); } /// @} @@ -259,46 +252,6 @@ class HybridNonlinearFactor : public HybridFactor { return std::make_shared( continuousKeys_, discreteKeys_, linearized_factors); } - - /** - * If the component factors are not already normalized, we want to compute - * their normalizing constants so that the resulting joint distribution is - * appropriately computed. Remember, this is the _negative_ normalizing - * constant for the measurement likelihood (since we are minimizing the - * _negative_ log-likelihood). - */ - double nonlinearFactorLogNormalizingConstant(const sharedFactor& factor, - const Values& values) const { - // Information matrix (inverse covariance matrix) for the factor. 
- Matrix infoMat; - - // If this is a NoiseModelFactor, we'll use its noiseModel to - // otherwise noiseModelFactor will be nullptr - if (auto noiseModelFactor = - std::dynamic_pointer_cast(factor)) { - // If dynamic cast to NoiseModelFactor succeeded, see if the noise model - // is Gaussian - auto noiseModel = noiseModelFactor->noiseModel(); - - auto gaussianNoiseModel = - std::dynamic_pointer_cast(noiseModel); - if (gaussianNoiseModel) { - // If the noise model is Gaussian, retrieve the information matrix - infoMat = gaussianNoiseModel->information(); - } else { - // If the factor is not a Gaussian factor, we'll linearize it to get - // something with a normalized noise model - // TODO(kevin): does this make sense to do? I think maybe not in - // general? Should we just yell at the user? - auto gaussianFactor = factor->linearize(values); - infoMat = gaussianFactor->information(); - } - } - - // Compute the (negative) log of the normalizing constant - return -(factor->dim() * log(2.0 * M_PI) / 2.0) - - (log(infoMat.determinant()) / 2.0); - } }; } // namespace gtsam diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index dcf0fce04..53bd634c4 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -255,9 +255,6 @@ class HybridNonlinearFactor : gtsam::HybridFactor { double error(const gtsam::Values& continuousValues, const gtsam::DiscreteValues& discreteValues) const; - double nonlinearFactorLogNormalizingConstant( - const gtsam::NonlinearFactor* factor, const gtsam::Values& values) const; - HybridGaussianFactor* linearize(const gtsam::Values& continuousValues) const; void print(string s = "HybridNonlinearFactor\n", From 149601761b76cc8e690197357b3b444a9e2756f6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 18 Sep 2024 17:59:18 -0400 Subject: [PATCH 255/398] fix wrapper --- gtsam/hybrid/hybrid.i | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 574b013a5..df4e6966d 
100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -248,13 +248,11 @@ class HybridNonlinearFactor : gtsam::HybridFactor { HybridNonlinearFactor( const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, const gtsam::DecisionTree< - gtsam::Key, std::pair>& factors, - bool normalized = false); + gtsam::Key, std::pair>& factors); HybridNonlinearFactor( const gtsam::KeyVector& keys, const gtsam::DiscreteKey& discreteKey, - const std::vector>& factors, - bool normalized = false); + const std::vector>& factors); double error(const gtsam::Values& continuousValues, const gtsam::DiscreteValues& discreteValues) const; From 987ecd4a07a40cf88cebd9f820c3764701275d70 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 18 Sep 2024 18:19:24 -0400 Subject: [PATCH 256/398] undo accidental rename --- gtsam/hybrid/tests/testHybridGaussianFactor.cpp | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index c89e5d260..dff120855 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -221,9 +221,8 @@ double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, return 1 / (1 + e); }; -static HybridBayesNet GetHybridGaussianConditionalModel(double mu0, double mu1, - double sigma0, - double sigma1) { +static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, + double sigma0, double sigma1) { DiscreteKey m(M(0), 2); Key z = Z(0); @@ -258,7 +257,7 @@ static HybridBayesNet GetHybridGaussianConditionalModel(double mu0, double mu1, * The resulting factor graph should eliminate to a Bayes net * which represents a sigmoid function. 
*/ -TEST(HybridGaussianFactor, HybridGaussianConditionalModel) { +TEST(HybridGaussianFactor, GaussianMixtureModel) { using namespace test_gmm; double mu0 = 1.0, mu1 = 3.0; @@ -267,7 +266,7 @@ TEST(HybridGaussianFactor, HybridGaussianConditionalModel) { DiscreteKey m(M(0), 2); Key z = Z(0); - auto hbn = GetHybridGaussianConditionalModel(mu0, mu1, sigma, sigma); + auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma, sigma); // The result should be a sigmoid. // So should be P(m=1|z) = 0.5 at z=3.0 - 1.0=2.0 @@ -330,7 +329,7 @@ TEST(HybridGaussianFactor, HybridGaussianConditionalModel) { * which represents a Gaussian-like function * where m1>m0 close to 3.1333. */ -TEST(HybridGaussianFactor, HybridGaussianConditionalModel2) { +TEST(HybridGaussianFactor, GaussianMixtureModel2) { using namespace test_gmm; double mu0 = 1.0, mu1 = 3.0; @@ -339,7 +338,7 @@ TEST(HybridGaussianFactor, HybridGaussianConditionalModel2) { DiscreteKey m(M(0), 2); Key z = Z(0); - auto hbn = GetHybridGaussianConditionalModel(mu0, mu1, sigma0, sigma1); + auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma0, sigma1); double m1_high = 3.133, lambda = 4; { From 80d9a5a65fc4fdf0ef45921a971597c73ef3c14f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 10:27:43 -0400 Subject: [PATCH 257/398] remove duplicate test and focus only on direct specification --- .../hybrid/tests/testHybridGaussianFactor.cpp | 161 +----------------- 1 file changed, 7 insertions(+), 154 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index dff120855..8bf508022 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -741,152 +741,6 @@ TEST(HybridGaussianFactor, TwoStateModel4) { EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); } -/** - * @brief Helper function to specify a Hybrid Bayes Net - * P(X1)P(Z1 | X1, X2, M1) and convert it to a Hybrid Factor Graph - * 
ϕ(X1)ϕ(X1, X2, M1; Z1) by converting to likelihoods given Z1. - * - * We can specify either different means or different sigmas, - * or both for each hybrid factor component. - * - * @param values Initial values for linearization. - * @param means The mean values for the conditional components. - * @param sigmas Noise model sigma values (standard deviation). - * @param m1 The discrete mode key. - * @param z1 The measurement value. - * @return HybridGaussianFactorGraph - */ -static HybridGaussianFactorGraph GetFactorGraphFromBayesNet( - const gtsam::Values &values, const std::vector &means, - const std::vector &sigmas, DiscreteKey &m1, double z1 = 0.0) { - // Noise models - auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); - auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - - // HybridGaussianFactor component factors - auto f0 = - std::make_shared>(X(0), X(1), means[0], model0); - auto f1 = - std::make_shared>(X(0), X(1), means[1], model1); - - /// Get terms for each p^m(z1 | x1, x2) - Matrix H0_1, H0_2, H1_1, H1_2; - double x1 = values.at(X(0)), x2 = values.at(X(1)); - Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); - std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(0), H0_1 /*Sp1*/}, - {X(1), H0_2 /*Tp2*/}}; - - Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); - std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(0), H1_1 /*Sp1*/}, - {X(1), H1_2 /*Tp2*/}}; - // Create conditional P(Z1 | X1, X2, M1) - auto conditionals = std::vector{ - std::make_shared(terms0, 1, -d0, model0), - std::make_shared(terms1, 1, -d1, model1)}; - gtsam::HybridBayesNet bn; - bn.emplace_shared( - KeyVector{Z(1)}, KeyVector{X(0), X(1)}, DiscreteKeys{m1}, conditionals); - - // Create FG via toFactorGraph - gtsam::VectorValues measurements; - measurements.insert(Z(1), gtsam::I_1x1 * z1); // Set Z1 - HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); - - // 
Linearized prior factor on X1 - auto prior = PriorFactor(X(0), x1, prior_noise).linearize(values); - mixture_fg.push_back(prior); - - return mixture_fg; -} - -/* ************************************************************************* */ -/** - * @brief Test Hybrid Factor Graph. - * - * We specify a hybrid Bayes network P(Z | X, M) = P(X1)P(Z1 | X1, X2, M1), - * which is then converted to a factor graph by specifying Z1. - * This is different from the TwoStateModel version since - * we use a factor with 2 continuous variables ϕ(x1, x2, m1) - * directly instead of a conditional. - * This serves as a good sanity check. - * - * P(Z1 | X1, X2, M1) has 2 conditionals each for the binary - * mode m1. - */ -TEST(HybridGaussianFactor, FactorGraphFromBayesNet) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 0.0, x2 = 1.75; - values.insert(X(0), x1); - values.insert(X(1), x2); - - // Different means, same sigma - std::vector means{0.0, 2.0}, sigmas{1e-0, 1e-0}; - - HybridGaussianFactorGraph hfg = - GetFactorGraphFromBayesNet(values, means, sigmas, m1, 0.0); - - { - // With no measurement on X2, each mode should be equally likely - auto bn = hfg.eliminateSequential(); - HybridValues actual = bn->optimize(); - - HybridValues expected( - VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(-1.75)}}, - DiscreteValues{{M(1), 0}}); - - EXPECT(assert_equal(expected, actual)); - - DiscreteValues dv0{{M(1), 0}}; - VectorValues cont0 = bn->optimize(dv0); - double error0 = bn->error(HybridValues(cont0, dv0)); - // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9); - - DiscreteValues dv1{{M(1), 1}}; - VectorValues cont1 = bn->optimize(dv1); - double error1 = bn->error(HybridValues(cont1, dv1)); - EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9); - } - { - // If we add a measurement on X2, we have more information to work with. 
- // Add a measurement on X2 - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - GaussianConditional meas_z2(Z(2), Vector1(2.0), I_1x1, X(1), I_1x1, - prior_noise); - auto prior_x2 = meas_z2.likelihood(Vector1(x2)); - - hfg.push_back(prior_x2); - - auto bn = hfg.eliminateSequential(); - HybridValues actual = bn->optimize(); - - // regression - HybridValues expected( - VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(0.25)}}, - DiscreteValues{{M(1), 1}}); - EXPECT(assert_equal(expected, actual)); - - DiscreteValues dv0{{M(1), 0}}; - VectorValues cont0 = bn->optimize(dv0); - // regression - EXPECT_DOUBLES_EQUAL(2.12692448787, bn->error(HybridValues(cont0, dv0)), - 1e-9); - - DiscreteValues dv1{{M(1), 1}}; - VectorValues cont1 = bn->optimize(dv1); - // regression - EXPECT_DOUBLES_EQUAL(0.126928487854, bn->error(HybridValues(cont1, dv1)), - 1e-9); - } -} - namespace test_direct_factor_graph { /** * @brief Create a Factor Graph by directly specifying all @@ -902,10 +756,11 @@ namespace test_direct_factor_graph { */ static HybridGaussianFactorGraph CreateFactorGraph( const gtsam::Values &values, const std::vector &means, - const std::vector &sigmas, DiscreteKey &m1) { + const std::vector &sigmas, DiscreteKey &m1, + double measurement_noise = 1e-3) { auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + auto prior_noise = noiseModel::Isotropic::Sigma(1, measurement_noise); auto f0 = std::make_shared>(X(0), X(1), means[0], model0) @@ -917,10 +772,10 @@ static HybridGaussianFactorGraph CreateFactorGraph( // Create HybridGaussianFactor std::vector factors{ {f0, ComputeLogNormalizer(model0)}, {f1, ComputeLogNormalizer(model1)}}; - HybridGaussianFactor mixtureFactor({X(0), X(1)}, {m1}, factors); + HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors); HybridGaussianFactorGraph hfg; - hfg.push_back(mixtureFactor); + 
hfg.push_back(motionFactor); hfg.push_back(PriorFactor(X(0), values.at(X(0)), prior_noise) .linearize(values)); @@ -1025,10 +880,8 @@ TEST(HybridGaussianFactor, DifferentCovariancesFG) { std::vector means = {0.0, 0.0}, sigmas = {1e2, 1e-2}; // Create FG with HybridGaussianFactor and prior on X1 - HybridGaussianFactorGraph mixture_fg = - CreateFactorGraph(values, means, sigmas, m1); - - auto hbn = mixture_fg.eliminateSequential(); + HybridGaussianFactorGraph fg = CreateFactorGraph(values, means, sigmas, m1); + auto hbn = fg.eliminateSequential(); VectorValues cv; cv.insert(X(0), Vector1(0.0)); From 717eb7eadc8141ac0c41e9afc564b75693160c87 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 10:28:16 -0400 Subject: [PATCH 258/398] relinearization test --- .../tests/testHybridNonlinearFactorGraph.cpp | 60 +++++++++++++++++-- 1 file changed, 54 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 3631ac44e..261622463 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -857,10 +857,10 @@ namespace test_relinearization { */ static HybridNonlinearFactorGraph CreateFactorGraph( const std::vector &means, const std::vector &sigmas, - DiscreteKey &m1, double x0_measurement) { + DiscreteKey &m1, double x0_measurement, double measurement_noise = 1e-3) { auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + auto prior_noise = noiseModel::Isotropic::Sigma(1, measurement_noise); auto f0 = std::make_shared>(X(0), X(1), means[0], model0); @@ -871,7 +871,7 @@ static HybridNonlinearFactorGraph CreateFactorGraph( std::vector factors{ {f0, ComputeLogNormalizer(model0)}, {f1, ComputeLogNormalizer(model1)}}; - HybridNonlinearFactor mixtureFactor({X(0), X(1)}, {m1}, 
factors); + HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors); HybridNonlinearFactorGraph hfg; hfg.push_back(mixtureFactor); @@ -968,7 +968,7 @@ TEST(HybridNonlinearFactorGraph, DifferentMeans) { * | * M1 */ -TEST_DISABLED(HybridNonlinearFactorGraph, DifferentCovariances) { +TEST(HybridNonlinearFactorGraph, DifferentCovariances) { using namespace test_relinearization; DiscreteKey m1(M(1), 2); @@ -982,8 +982,10 @@ TEST_DISABLED(HybridNonlinearFactorGraph, DifferentCovariances) { // Create FG with HybridNonlinearFactor and prior on X1 HybridNonlinearFactorGraph hfg = CreateFactorGraph(means, sigmas, m1, x0); - // Linearize and eliminate - auto hbn = hfg.linearize(values)->eliminateSequential(); + // Linearize + auto hgfg = hfg.linearize(values); + // and eliminate + auto hbn = hgfg->eliminateSequential(); VectorValues cv; cv.insert(X(0), Vector1(0.0)); @@ -1005,6 +1007,52 @@ TEST_DISABLED(HybridNonlinearFactorGraph, DifferentCovariances) { EXPECT(assert_equal(expected_m1, actual_m1)); } +TEST(HybridNonlinearFactorGraph, Relinearization) { + using namespace test_relinearization; + + DiscreteKey m1(M(1), 2); + + Values values; + double x0 = 0.0, x1 = 0.8; + values.insert(X(0), x0); + values.insert(X(1), x1); + + std::vector means = {0.0, 1.0}, sigmas = {1e-2, 1e-2}; + + double prior_sigma = 1e-2; + // Create FG with HybridNonlinearFactor and prior on X1 + HybridNonlinearFactorGraph hfg = + CreateFactorGraph(means, sigmas, m1, 0.0, prior_sigma); + hfg.push_back(PriorFactor( + X(1), 1.2, noiseModel::Isotropic::Sigma(1, prior_sigma))); + + // Linearize + auto hgfg = hfg.linearize(values); + // and eliminate + auto hbn = hgfg->eliminateSequential(); + + HybridValues delta = hbn->optimize(); + values = values.retract(delta.continuous()); + + Values expected_first_result; + expected_first_result.insert(X(0), 0.0666666666667); + expected_first_result.insert(X(1), 1.13333333333); + EXPECT(assert_equal(expected_first_result, values)); + + // Re-linearize + hgfg = 
hfg.linearize(values); + // and eliminate + hbn = hgfg->eliminateSequential(); + delta = hbn->optimize(); + HybridValues result(delta.continuous(), delta.discrete(), + values.retract(delta.continuous())); + + HybridValues expected_result( + VectorValues{{X(0), Vector1(0)}, {X(1), Vector1(0)}}, + DiscreteValues{{M(1), 1}}, expected_first_result); + EXPECT(assert_equal(expected_result, result)); +} + /* ************************************************************************* */ int main() { From f875b86357448c71cc9e1fd7be18c2ef0710b4bb Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 10:30:35 -0400 Subject: [PATCH 259/398] print nonlinear part of HybridValues --- gtsam/hybrid/HybridValues.cpp | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridValues.cpp b/gtsam/hybrid/HybridValues.cpp index 1fb2a2adb..533cd6eab 100644 --- a/gtsam/hybrid/HybridValues.cpp +++ b/gtsam/hybrid/HybridValues.cpp @@ -36,9 +36,12 @@ HybridValues::HybridValues(const VectorValues& cv, const DiscreteValues& dv, void HybridValues::print(const std::string& s, const KeyFormatter& keyFormatter) const { std::cout << s << ": \n"; - continuous_.print(" Continuous", - keyFormatter); // print continuous components - discrete_.print(" Discrete", keyFormatter); // print discrete components + // print continuous components + continuous_.print(" Continuous", keyFormatter); + // print discrete components + discrete_.print(" Discrete", keyFormatter); + // print nonlinear components + nonlinear_.print(" Nonlinear", keyFormatter); } /* ************************************************************************* */ From 9b6facd26246051b4d9396b81c503e10350695a8 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 15:33:24 -0400 Subject: [PATCH 260/398] add documentation for additive scalar in the error and remove the 0.5 since it gets cancelled out during normalization --- gtsam/hybrid/HybridGaussianFactor.h | 9 +++++++++ 
gtsam/hybrid/HybridNonlinearFactor.h | 15 +++++++++++++-- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 8c73b7952..d724bdff3 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -45,6 +45,15 @@ using GaussianFactorValuePair = std::pair; * where the set of discrete variables indexes to * the continuous gaussian distribution. * + * In factor graphs the error function typically returns 0.5*|h(x)-z|^2, i.e., + * the negative log-likelihood for a Gaussian noise model. + * In hybrid factor graphs we allow *adding* an arbitrary scalar dependent on + * the discrete assignment. + * For example, adding a 70/30 mode probability is supported by providing the + * scalars $-log(.7)$ and $-log(.3)$. + * Note that adding a common constant will not make any difference in the + * optimization, so $-log(70)$ and $-log(30)$ work just as well. + * * @ingroup hybrid */ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 3c0e2ab91..a0c7af92b 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -45,6 +45,17 @@ using NonlinearFactorValuePair = std::pair; * This class stores all factors as HybridFactors which can then be typecast to * one of (NonlinearFactor, GaussianFactor) which can then be checked to perform * the correct operation. + * + * In factor graphs the error function typically returns 0.5*|h(x)-z|^2, i.e., + * the negative log-likelihood for a Gaussian noise model. + * In hybrid factor graphs we allow *adding* an arbitrary scalar dependent on + * the discrete assignment. + * For example, adding a 70/30 mode probability is supported by providing the + * scalars $-log(.7)$ and $-log(.3)$. 
+ * Note that adding a common constant will not make any difference in the + * optimization, so $-log(70)$ and $-log(30)$ work just as well. + * + * @ingroup hybrid */ class HybridNonlinearFactor : public HybridFactor { public: @@ -134,7 +145,7 @@ class HybridNonlinearFactor : public HybridFactor { auto errorFunc = [continuousValues](const std::pair& f) { auto [factor, val] = f; - return factor->error(continuousValues) + (0.5 * val); + return factor->error(continuousValues) + val; }; DecisionTree result(factors_, errorFunc); return result; @@ -153,7 +164,7 @@ class HybridNonlinearFactor : public HybridFactor { auto [factor, val] = factors_(discreteValues); // Compute the error for the selected factor const double factorError = factor->error(continuousValues); - return factorError + (0.5 * val); + return factorError + val; } /** From 244661afb1c1318c094cd9da37f8ef74ace9e3ac Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 16:09:54 -0400 Subject: [PATCH 261/398] rename ComputeLogNormalizer to ComputeLogNormalizerConstant --- gtsam/hybrid/HybridGaussianFactor.cpp | 6 +++--- gtsam/hybrid/tests/testHybridGaussianFactor.cpp | 3 ++- gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp | 3 ++- gtsam/linear/NoiseModel.cpp | 4 ++-- gtsam/linear/NoiseModel.h | 4 ++-- gtsam/linear/tests/testNoiseModel.cpp | 6 +++--- 6 files changed, 14 insertions(+), 12 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index cabe6defc..13b26db6c 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -30,8 +30,8 @@ namespace gtsam { /** * @brief Helper function to augment the [A|b] matrices in the factor components - * with the normalizer values. - * This is done by storing the normalizer value in + * with the additional scalar values. + * This is done by storing the value in * the `b` vector as an additional row. 
* * @param factors DecisionTree of GaussianFactors and arbitrary scalars. @@ -56,7 +56,7 @@ HybridGaussianFactor::Factors augment( const HybridGaussianFactor::sharedFactor &gf) { auto jf = std::dynamic_pointer_cast(gf); if (!jf) return gf; - // If the log_normalizer is 0, do nothing + // If the value is 0, do nothing if (values(assignment) == 0.0) return gf; GaussianFactorGraph gfg; diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 246b8d4ce..63b37e7d4 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -771,7 +771,8 @@ static HybridGaussianFactorGraph CreateFactorGraph( // Create HybridGaussianFactor std::vector factors{ - {f0, ComputeLogNormalizer(model0)}, {f1, ComputeLogNormalizer(model1)}}; + {f0, ComputeLogNormalizerConstant(model0)}, + {f1, ComputeLogNormalizerConstant(model1)}}; HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors); HybridGaussianFactorGraph hfg; diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 133b165c1..98bbd36d8 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -869,7 +869,8 @@ static HybridNonlinearFactorGraph CreateFactorGraph( // Create HybridNonlinearFactor std::vector factors{ - {f0, ComputeLogNormalizer(model0)}, {f1, ComputeLogNormalizer(model1)}}; + {f0, ComputeLogNormalizerConstant(model0)}, + {f1, ComputeLogNormalizerConstant(model1)}}; HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors); diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index a586d119b..d6526fb9c 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -710,7 +710,7 @@ const RobustModel::shared_ptr &robust, const NoiseModel::shared_ptr noise){ } // namespace noiseModel /* 
*******************************************************************************/ -double ComputeLogNormalizer( +double ComputeLogNormalizerConstant( const noiseModel::Gaussian::shared_ptr& noise_model) { // Since noise models are Gaussian, we can get the logDeterminant using // the same trick as in GaussianConditional @@ -725,7 +725,7 @@ double ComputeLogNormalizer( size_t n = noise_model->dim(); constexpr double log2pi = 1.8378770664093454835606594728112; - return n * log2pi + logDeterminantSigma; + return 0.5*(n * log2pi + logDeterminantSigma); } } // gtsam diff --git a/gtsam/linear/NoiseModel.h b/gtsam/linear/NoiseModel.h index ffc1a3ebc..d2ceb24db 100644 --- a/gtsam/linear/NoiseModel.h +++ b/gtsam/linear/NoiseModel.h @@ -757,10 +757,10 @@ namespace gtsam { * We compute this in the log-space for numerical accuracy. * * @param noise_model The Gaussian noise model - * whose normalizer we wish to compute. + * whose normalization constant we wish to compute. * @return double */ - GTSAM_EXPORT double ComputeLogNormalizer( + GTSAM_EXPORT double ComputeLogNormalizerConstant( const noiseModel::Gaussian::shared_ptr& noise_model); } //\ namespace gtsam diff --git a/gtsam/linear/tests/testNoiseModel.cpp b/gtsam/linear/tests/testNoiseModel.cpp index 49874f901..4248f5beb 100644 --- a/gtsam/linear/tests/testNoiseModel.cpp +++ b/gtsam/linear/tests/testNoiseModel.cpp @@ -807,11 +807,11 @@ TEST(NoiseModel, NonDiagonalGaussian) } } -TEST(NoiseModel, ComputeLogNormalizer) { +TEST(NoiseModel, ComputeLogNormalizerConstant) { // Very simple 1D noise model, which we can compute by hand. double sigma = 0.1; auto noise_model = Isotropic::Sigma(1, sigma); - double actual_value = ComputeLogNormalizer(noise_model); + double actual_value = ComputeLogNormalizerConstant(noise_model); // Compute log(|2πΣ|) by hand. 
// = log(2π) + log(Σ) (since it is 1D) constexpr double log2pi = 1.8378770664093454835606594728112; @@ -821,7 +821,7 @@ TEST(NoiseModel, ComputeLogNormalizer) { // Similar situation in the 3D case size_t n = 3; auto noise_model2 = Isotropic::Sigma(n, sigma); - double actual_value2 = ComputeLogNormalizer(noise_model2); + double actual_value2 = ComputeLogNormalizerConstant(noise_model2); // We multiply by 3 due to the determinant double expected_value2 = n * (log2pi + log(sigma * sigma)); EXPECT_DOUBLES_EQUAL(expected_value2, actual_value2, 1e-9); From af06b3382530bd22dfd8962abdf44b2bf626c4a6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 15:55:13 -0400 Subject: [PATCH 262/398] split HybridNonlinearFactor into .h and .cpp --- gtsam/hybrid/HybridNonlinearFactor.cpp | 140 +++++++++++++++++++++++++ gtsam/hybrid/HybridNonlinearFactor.h | 98 ++--------------- 2 files changed, 151 insertions(+), 87 deletions(-) create mode 100644 gtsam/hybrid/HybridNonlinearFactor.cpp diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp new file mode 100644 index 000000000..3584b77be --- /dev/null +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -0,0 +1,140 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridNonlinearFactor.h + * @brief A set of nonlinear factors indexed by a set of discrete keys. 
+ * @author Varun Agrawal + * @date Sep 12, 2024 + */ + +// #include +// #include +// #include +#include +// #include +// #include +// #include + +namespace gtsam { + +/* *******************************************************************************/ +HybridNonlinearFactor::HybridNonlinearFactor(const KeyVector& keys, + const DiscreteKeys& discreteKeys, + const Factors& factors) + : Base(keys, discreteKeys), factors_(factors) {} + +/* *******************************************************************************/ +AlgebraicDecisionTree HybridNonlinearFactor::errorTree( + const Values& continuousValues) const { + // functor to convert from sharedFactor to double error value. + auto errorFunc = + [continuousValues](const std::pair& f) { + auto [factor, val] = f; + return factor->error(continuousValues) + val; + }; + DecisionTree result(factors_, errorFunc); + return result; +} + +/* *******************************************************************************/ +double HybridNonlinearFactor::error( + const Values& continuousValues, + const DiscreteValues& discreteValues) const { + // Retrieve the factor corresponding to the assignment in discreteValues. 
+ auto [factor, val] = factors_(discreteValues); + // Compute the error for the selected factor + const double factorError = factor->error(continuousValues); + return factorError + val; +} + +/* *******************************************************************************/ +double HybridNonlinearFactor::error(const HybridValues& values) const { + return error(values.nonlinear(), values.discrete()); +} + +/* *******************************************************************************/ +size_t HybridNonlinearFactor::dim() const { + const auto assignments = DiscreteValues::CartesianProduct(discreteKeys_); + auto [factor, val] = factors_(assignments.at(0)); + return factor->dim(); +} + +/* *******************************************************************************/ +void HybridNonlinearFactor::print(const std::string& s, + const KeyFormatter& keyFormatter) const { + std::cout << (s.empty() ? "" : s + " "); + Base::print("", keyFormatter); + std::cout << "\nHybridNonlinearFactor\n"; + auto valueFormatter = [](const std::pair& v) { + auto [factor, val] = v; + if (factor) { + return "Nonlinear factor on " + std::to_string(factor->size()) + " keys"; + } else { + return std::string("nullptr"); + } + }; + factors_.print("", keyFormatter, valueFormatter); +} + +/* *******************************************************************************/ +bool HybridNonlinearFactor::equals(const HybridFactor& other, + double tol) const { + // We attempt a dynamic cast from HybridFactor to HybridNonlinearFactor. If + // it fails, return false. + if (!dynamic_cast(&other)) return false; + + // If the cast is successful, we'll properly construct a + // HybridNonlinearFactor object from `other` + const HybridNonlinearFactor& f( + static_cast(other)); + + // Ensure that this HybridNonlinearFactor and `f` have the same `factors_`. 
+ auto compare = [tol](const std::pair& a, + const std::pair& b) { + return traits::Equals(*a.first, *b.first, tol) && + (a.second == b.second); + }; + if (!factors_.equals(f.factors_, compare)) return false; + + // If everything above passes, and the keys_ and discreteKeys_ + // member variables are identical, return true. + return (std::equal(keys_.begin(), keys_.end(), f.keys().begin()) && + (discreteKeys_ == f.discreteKeys_)); +} + +/* *******************************************************************************/ +GaussianFactor::shared_ptr HybridNonlinearFactor::linearize( + const Values& continuousValues, + const DiscreteValues& discreteValues) const { + auto factor = factors_(discreteValues).first; + return factor->linearize(continuousValues); +} + +/* *******************************************************************************/ +std::shared_ptr HybridNonlinearFactor::linearize( + const Values& continuousValues) const { + // functional to linearize each factor in the decision tree + auto linearizeDT = + [continuousValues]( + const std::pair& f) -> GaussianFactorValuePair { + auto [factor, val] = f; + return {factor->linearize(continuousValues), val}; + }; + + DecisionTree> + linearized_factors(factors_, linearizeDT); + + return std::make_shared(continuousKeys_, discreteKeys_, + linearized_factors); +} + +} // namespace gtsam \ No newline at end of file diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index a0c7af92b..77434208e 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -11,7 +11,7 @@ /** * @file HybridNonlinearFactor.h - * @brief Nonlinear Mixture factor of continuous and discrete. + * @brief A set of nonlinear factors indexed by a set of discrete keys. * @author Kevin Doherty, kdoherty@mit.edu * @author Varun Agrawal * @date December 2021 @@ -85,8 +85,7 @@ class HybridNonlinearFactor : public HybridFactor { * @param factors Decision tree with of shared factors. 
*/ HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, - const Factors& factors) - : Base(keys, discreteKeys), factors_(factors) {} + const Factors& factors); /** * @brief Convenience constructor that generates the underlying factor @@ -140,16 +139,7 @@ class HybridNonlinearFactor : public HybridFactor { * @return AlgebraicDecisionTree A decision tree with the same keys * as the factor, and leaf values as the error. */ - AlgebraicDecisionTree errorTree(const Values& continuousValues) const { - // functor to convert from sharedFactor to double error value. - auto errorFunc = - [continuousValues](const std::pair& f) { - auto [factor, val] = f; - return factor->error(continuousValues) + val; - }; - DecisionTree result(factors_, errorFunc); - return result; - } + AlgebraicDecisionTree errorTree(const Values& continuousValues) const; /** * @brief Compute error of factor given both continuous and discrete values. @@ -159,13 +149,7 @@ class HybridNonlinearFactor : public HybridFactor { * @return double The error of this factor. */ double error(const Values& continuousValues, - const DiscreteValues& discreteValues) const { - // Retrieve the factor corresponding to the assignment in discreteValues. - auto [factor, val] = factors_(discreteValues); - // Compute the error for the selected factor - const double factorError = factor->error(continuousValues); - return factorError + val; - } + const DiscreteValues& discreteValues) const; /** * @brief Compute error of factor given hybrid values. @@ -173,67 +157,24 @@ class HybridNonlinearFactor : public HybridFactor { * @param values The continuous Values and the discrete assignment. * @return double The error of this factor. */ - double error(const HybridValues& values) const override { - return error(values.nonlinear(), values.discrete()); - } + double error(const HybridValues& values) const override; /** * @brief Get the dimension of the factor (number of rows on linearization). 
* Returns the dimension of the first component factor. * @return size_t */ - size_t dim() const { - const auto assignments = DiscreteValues::CartesianProduct(discreteKeys_); - auto [factor, val] = factors_(assignments.at(0)); - return factor->dim(); - } + size_t dim() const; /// Testable /// @{ /// print to stdout - void print( - const std::string& s = "", - const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override { - std::cout << (s.empty() ? "" : s + " "); - Base::print("", keyFormatter); - std::cout << "\nHybridNonlinearFactor\n"; - auto valueFormatter = [](const std::pair& v) { - auto [factor, val] = v; - if (factor) { - return "Nonlinear factor on " + std::to_string(factor->size()) + - " keys"; - } else { - return std::string("nullptr"); - } - }; - factors_.print("", keyFormatter, valueFormatter); - } + void print(const std::string& s = "", const KeyFormatter& keyFormatter = + DefaultKeyFormatter) const override; /// Check equality - bool equals(const HybridFactor& other, double tol = 1e-9) const override { - // We attempt a dynamic cast from HybridFactor to HybridNonlinearFactor. If - // it fails, return false. - if (!dynamic_cast(&other)) return false; - - // If the cast is successful, we'll properly construct a - // HybridNonlinearFactor object from `other` - const HybridNonlinearFactor& f( - static_cast(other)); - - // Ensure that this HybridNonlinearFactor and `f` have the same `factors_`. - auto compare = [tol](const std::pair& a, - const std::pair& b) { - return traits::Equals(*a.first, *b.first, tol) && - (a.second == b.second); - }; - if (!factors_.equals(f.factors_, compare)) return false; - - // If everything above passes, and the keys_ and discreteKeys_ - // member variables are identical, return true. 
- return (std::equal(keys_.begin(), keys_.end(), f.keys().begin()) && - (discreteKeys_ == f.discreteKeys_)); - } + bool equals(const HybridFactor& other, double tol = 1e-9) const override; /// @} @@ -241,28 +182,11 @@ class HybridNonlinearFactor : public HybridFactor { /// discreteValues. GaussianFactor::shared_ptr linearize( const Values& continuousValues, - const DiscreteValues& discreteValues) const { - auto factor = factors_(discreteValues).first; - return factor->linearize(continuousValues); - } + const DiscreteValues& discreteValues) const; /// Linearize all the continuous factors to get a HybridGaussianFactor. std::shared_ptr linearize( - const Values& continuousValues) const { - // functional to linearize each factor in the decision tree - auto linearizeDT = - [continuousValues](const std::pair& f) - -> GaussianFactorValuePair { - auto [factor, val] = f; - return {factor->linearize(continuousValues), val}; - }; - - DecisionTree> - linearized_factors(factors_, linearizeDT); - - return std::make_shared( - continuousKeys_, discreteKeys_, linearized_factors); - } + const Values& continuousValues) const; }; } // namespace gtsam From 916778c45b191b4a53141252e2917e8176548137 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 15:59:39 -0400 Subject: [PATCH 263/398] remove extra includes --- gtsam/hybrid/HybridNonlinearFactor.cpp | 6 ------ 1 file changed, 6 deletions(-) diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp index 3584b77be..9613233b1 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.cpp +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -16,13 +16,7 @@ * @date Sep 12, 2024 */ -// #include -// #include -// #include #include -// #include -// #include -// #include namespace gtsam { From 4f888291bfa1dfba6d1b0c2a92ed90d459b620fe Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 16:11:28 -0400 Subject: [PATCH 264/398] fix docstring for HybridGaussianFactor --- 
gtsam/hybrid/HybridGaussianFactor.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index d724bdff3..a86714863 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -45,7 +45,7 @@ using GaussianFactorValuePair = std::pair; * where the set of discrete variables indexes to * the continuous gaussian distribution. * - * In factor graphs the error function typically returns 0.5*|h(x)-z|^2, i.e., + * In factor graphs the error function typically returns 0.5*|A*x - b|^2, i.e., * the negative log-likelihood for a Gaussian noise model. * In hybrid factor graphs we allow *adding* an arbitrary scalar dependent on * the discrete assignment. From d60a253fcbac0574790d5f7e70fa09a6363f4a31 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 17:50:15 -0400 Subject: [PATCH 265/398] logNormalizationConstant is now a method for Gaussian noise model --- gtsam/linear/NoiseModel.cpp | 39 +++++++++++++-------------- gtsam/linear/NoiseModel.h | 27 ++++++++++--------- gtsam/linear/tests/testNoiseModel.cpp | 13 +++++---- 3 files changed, 39 insertions(+), 40 deletions(-) diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index d6526fb9c..af3081b9f 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -238,6 +238,25 @@ void Gaussian::WhitenSystem(Matrix& A1, Matrix& A2, Matrix& A3, Vector& b) const Matrix Gaussian::information() const { return R().transpose() * R(); } +/* *******************************************************************************/ +double Gaussian::logNormalizationConstant() const { + // Since noise models are Gaussian, we can get the logDeterminant easily + // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1} + // log det(Sigma) = -log(det(R'R)) = -2*log(det(R)) + // Hence, log det(Sigma)) = -2.0 * logDeterminant() + // which gives log = -0.5*n*log(2*pi) - 0.5*(-2.0 * 
logDeterminant()) + // = -0.5*n*log(2*pi) + (0.5*2.0 * logDeterminant()) + // = -0.5*n*log(2*pi) + logDeterminant() + double logDetR = + R().diagonal().unaryExpr([](double x) { return log(x); }).sum(); + + size_t n = dim(); + constexpr double log2pi = 1.8378770664093454835606594728112; + // Get 1/log(\sqrt(|2pi Sigma|)) = -0.5*log(|2pi Sigma|) + return -0.5 * n * log2pi + logDetR; +} + + /* ************************************************************************* */ // Diagonal /* ************************************************************************* */ @@ -708,24 +727,4 @@ const RobustModel::shared_ptr &robust, const NoiseModel::shared_ptr noise){ /* ************************************************************************* */ } // namespace noiseModel - -/* *******************************************************************************/ -double ComputeLogNormalizerConstant( - const noiseModel::Gaussian::shared_ptr& noise_model) { - // Since noise models are Gaussian, we can get the logDeterminant using - // the same trick as in GaussianConditional - // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1} - // log det(Sigma) = -log(det(R'R)) = -2*log(det(R)) - // Hence, log det(Sigma)) = -2.0 * logDetR() - double logDetR = noise_model->R() - .diagonal() - .unaryExpr([](double x) { return log(x); }) - .sum(); - double logDeterminantSigma = -2.0 * logDetR; - - size_t n = noise_model->dim(); - constexpr double log2pi = 1.8378770664093454835606594728112; - return 0.5*(n * log2pi + logDeterminantSigma); -} - } // gtsam diff --git a/gtsam/linear/NoiseModel.h b/gtsam/linear/NoiseModel.h index d2ceb24db..b8e6c09bc 100644 --- a/gtsam/linear/NoiseModel.h +++ b/gtsam/linear/NoiseModel.h @@ -266,7 +266,20 @@ namespace gtsam { /// Compute covariance matrix virtual Matrix covariance() const; - private: + /** + * @brief Helper method to compute the normalization constant + * for a Gaussian noise model. 
+ * k = 1/log(\sqrt(|2πΣ|)) + * + * We compute this in the log-space for numerical accuracy. + * + * @param noise_model The Gaussian noise model + * whose normalization constant we wish to compute. + * @return double + */ + double logNormalizationConstant() const; + + private: #ifdef GTSAM_ENABLE_BOOST_SERIALIZATION /** Serialization function */ friend class boost::serialization::access; @@ -751,18 +764,6 @@ namespace gtsam { template<> struct traits : public Testable {}; template<> struct traits : public Testable {}; - /** - * @brief Helper function to compute the log(|2πΣ|) normalizer values - * for a Gaussian noise model. - * We compute this in the log-space for numerical accuracy. - * - * @param noise_model The Gaussian noise model - * whose normalization constant we wish to compute. - * @return double - */ - GTSAM_EXPORT double ComputeLogNormalizerConstant( - const noiseModel::Gaussian::shared_ptr& noise_model); - } //\ namespace gtsam diff --git a/gtsam/linear/tests/testNoiseModel.cpp b/gtsam/linear/tests/testNoiseModel.cpp index 4248f5beb..b6e5785b6 100644 --- a/gtsam/linear/tests/testNoiseModel.cpp +++ b/gtsam/linear/tests/testNoiseModel.cpp @@ -811,19 +811,18 @@ TEST(NoiseModel, ComputeLogNormalizerConstant) { // Very simple 1D noise model, which we can compute by hand. double sigma = 0.1; auto noise_model = Isotropic::Sigma(1, sigma); - double actual_value = ComputeLogNormalizerConstant(noise_model); - // Compute log(|2πΣ|) by hand. - // = log(2π) + log(Σ) (since it is 1D) - constexpr double log2pi = 1.8378770664093454835606594728112; - double expected_value = log2pi + log(sigma * sigma); + double actual_value = noise_model->logNormalizationConstant(); + // Compute 1/log(sqrt(|2πΣ|)) by hand. 
+ // = -0.5*(log(2π) + log(Σ)) (since it is 1D) + double expected_value = -0.5 * log(2 * M_PI * sigma * sigma); EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); // Similar situation in the 3D case size_t n = 3; auto noise_model2 = Isotropic::Sigma(n, sigma); - double actual_value2 = ComputeLogNormalizerConstant(noise_model2); + double actual_value2 = noise_model2->logNormalizationConstant(); // We multiply by 3 due to the determinant - double expected_value2 = n * (log2pi + log(sigma * sigma)); + double expected_value2 = -0.5 * n * log(2 * M_PI * sigma * sigma); EXPECT_DOUBLES_EQUAL(expected_value2, actual_value2, 1e-9); } From cea0dd577d087ec11e7585ea5f5b88970805447a Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 17:50:25 -0400 Subject: [PATCH 266/398] update tests --- gtsam/hybrid/tests/testHybridGaussianFactor.cpp | 5 +++-- gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 63b37e7d4..1ecd77132 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -770,9 +770,10 @@ static HybridGaussianFactorGraph CreateFactorGraph( ->linearize(values); // Create HybridGaussianFactor + // We multiply by -2 since the we want the underlying scalar to be log(|2πΣ|) std::vector factors{ - {f0, ComputeLogNormalizerConstant(model0)}, - {f1, ComputeLogNormalizerConstant(model1)}}; + {f0, -2 * model0->logNormalizationConstant()}, + {f1, -2 * model1->logNormalizationConstant()}}; HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors); HybridGaussianFactorGraph hfg; diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 98bbd36d8..8d2a5a74b 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ 
b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -868,9 +868,10 @@ static HybridNonlinearFactorGraph CreateFactorGraph( std::make_shared>(X(0), X(1), means[1], model1); // Create HybridNonlinearFactor + // We multiply by -2 since the we want the underlying scalar to be log(|2πΣ|) std::vector factors{ - {f0, ComputeLogNormalizerConstant(model0)}, - {f1, ComputeLogNormalizerConstant(model1)}}; + {f0, -2 * model0->logNormalizationConstant()}, + {f1, -2 * model1->logNormalizationConstant()}}; HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors); From f3f62d1708bb5f5d28b25974e87b31f0a4fb9fd8 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 21:18:38 -0400 Subject: [PATCH 267/398] mark GTSAM_EXPORT --- gtsam/hybrid/HybridNonlinearFactor.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 77434208e..6da846abe 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -57,7 +57,7 @@ using NonlinearFactorValuePair = std::pair; * * @ingroup hybrid */ -class HybridNonlinearFactor : public HybridFactor { +class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor { public: using Base = HybridFactor; using This = HybridNonlinearFactor; From 1ab82f382cf390704e7321336c174f4cf12dd945 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 20 Sep 2024 03:26:07 -0400 Subject: [PATCH 268/398] hide sqrt(2*value) so the user doesn't have to premultiply by 2 --- gtsam/hybrid/HybridGaussianConditional.cpp | 3 +-- gtsam/hybrid/HybridGaussianFactor.cpp | 19 +++++++++++-------- .../hybrid/tests/testHybridGaussianFactor.cpp | 7 ++++--- .../tests/testHybridNonlinearFactorGraph.cpp | 7 ++++--- 4 files changed, 20 insertions(+), 16 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 79673d9f2..df59637aa 100644 --- 
a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -222,8 +222,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( } else { // Add a constant to the likelihood in case the noise models // are not all equal. - double c = 2.0 * Cgm_Kgcm; - return {likelihood_m, c}; + return {likelihood_m, Cgm_Kgcm}; } }); return std::make_shared( diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 13b26db6c..2fbd4bd88 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -46,31 +46,34 @@ HybridGaussianFactor::Factors augment( AlgebraicDecisionTree valueTree; std::tie(gaussianFactors, valueTree) = unzip(factors); - // Normalize + // Compute minimum value for normalization. double min_value = valueTree.min(); - AlgebraicDecisionTree values = - valueTree.apply([&min_value](double n) { return n - min_value; }); // Finally, update the [A|b] matrices. - auto update = [&values](const Assignment &assignment, - const HybridGaussianFactor::sharedFactor &gf) { + auto update = [&min_value](const GaussianFactorValuePair &gfv) { + auto [gf, value] = gfv; + auto jf = std::dynamic_pointer_cast(gf); if (!jf) return gf; + + double normalized_value = value - min_value; + // If the value is 0, do nothing - if (values(assignment) == 0.0) return gf; + if (normalized_value == 0.0) return gf; GaussianFactorGraph gfg; gfg.push_back(jf); Vector c(1); - c << std::sqrt(values(assignment)); + // When hiding c inside the `b` vector, value == 0.5*c^2 + c << std::sqrt(2.0 * normalized_value); auto constantFactor = std::make_shared(c); gfg.push_back(constantFactor); return std::dynamic_pointer_cast( std::make_shared(gfg)); }; - return gaussianFactors.apply(update); + return HybridGaussianFactor::Factors(factors, update); } /* *******************************************************************************/ diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp 
b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 1ecd77132..bfa283983 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -770,10 +770,11 @@ static HybridGaussianFactorGraph CreateFactorGraph( ->linearize(values); // Create HybridGaussianFactor - // We multiply by -2 since the we want the underlying scalar to be log(|2πΣ|) + // We take negative since we want + // the underlying scalar to be log(\sqrt(|2πΣ|)) std::vector factors{ - {f0, -2 * model0->logNormalizationConstant()}, - {f1, -2 * model1->logNormalizationConstant()}}; + {f0, -model0->logNormalizationConstant()}, + {f1, -model1->logNormalizationConstant()}}; HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors); HybridGaussianFactorGraph hfg; diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 8d2a5a74b..621c8708e 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -868,10 +868,11 @@ static HybridNonlinearFactorGraph CreateFactorGraph( std::make_shared>(X(0), X(1), means[1], model1); // Create HybridNonlinearFactor - // We multiply by -2 since the we want the underlying scalar to be log(|2πΣ|) + // We take negative since we want + // the underlying scalar to be log(\sqrt(|2πΣ|)) std::vector factors{ - {f0, -2 * model0->logNormalizationConstant()}, - {f1, -2 * model1->logNormalizationConstant()}}; + {f0, -model0->logNormalizationConstant()}, + {f1, -model1->logNormalizationConstant()}}; HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors); From 364b4b4a755cdb1c5299dc770cbade9357d24245 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 20 Sep 2024 04:35:29 -0400 Subject: [PATCH 269/398] logDetR method which leverages noise model for efficiency. Build logDeterminant and logNormalizationConstant on top of it. 
--- gtsam/linear/NoiseModel.cpp | 37 +++++++++++++++++++++++++++++-------- gtsam/linear/NoiseModel.h | 28 +++++++++++++++++++--------- 2 files changed, 48 insertions(+), 17 deletions(-) diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index af3081b9f..de69fdce9 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -239,21 +239,31 @@ void Gaussian::WhitenSystem(Matrix& A1, Matrix& A2, Matrix& A3, Vector& b) const Matrix Gaussian::information() const { return R().transpose() * R(); } /* *******************************************************************************/ -double Gaussian::logNormalizationConstant() const { +double Gaussian::logDetR() const { + double logDetR = + R().diagonal().unaryExpr([](double x) { return log(x); }).sum(); + return logDetR; +} + +/* *******************************************************************************/ +double Gaussian::logDeterminant() const { // Since noise models are Gaussian, we can get the logDeterminant easily // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1} // log det(Sigma) = -log(det(R'R)) = -2*log(det(R)) - // Hence, log det(Sigma)) = -2.0 * logDeterminant() - // which gives log = -0.5*n*log(2*pi) - 0.5*(-2.0 * logDeterminant()) - // = -0.5*n*log(2*pi) + (0.5*2.0 * logDeterminant()) - // = -0.5*n*log(2*pi) + logDeterminant() - double logDetR = - R().diagonal().unaryExpr([](double x) { return log(x); }).sum(); + // Hence, log det(Sigma)) = -2.0 * logDetR() + return -2.0 * logDetR(); +} +/* *******************************************************************************/ +double Gaussian::logNormalizationConstant() const { + // log(det(Sigma)) = -2.0 * logDetR + // which gives log = -0.5*n*log(2*pi) - 0.5*(-2.0 * logDetR()) + // = -0.5*n*log(2*pi) + (0.5*2.0 * logDetR()) + // = -0.5*n*log(2*pi) + logDetR() size_t n = dim(); constexpr double log2pi = 1.8378770664093454835606594728112; // Get 1/log(\sqrt(|2pi Sigma|)) = -0.5*log(|2pi Sigma|) - return -0.5 * 
n * log2pi + logDetR; + return -0.5 * n * log2pi + logDetR(); } @@ -333,6 +343,11 @@ void Diagonal::WhitenInPlace(Eigen::Block H) const { H = invsigmas().asDiagonal() * H; } +/* *******************************************************************************/ +double Diagonal::logDetR() const { + return invsigmas_.unaryExpr([](double x) { return log(x); }).sum(); +} + /* ************************************************************************* */ // Constrained /* ************************************************************************* */ @@ -661,6 +676,9 @@ void Isotropic::WhitenInPlace(Eigen::Block H) const { H *= invsigma_; } +/* *******************************************************************************/ +double Isotropic::logDetR() const { return log(invsigma_) * dim(); } + /* ************************************************************************* */ // Unit /* ************************************************************************* */ @@ -673,6 +691,9 @@ double Unit::squaredMahalanobisDistance(const Vector& v) const { return v.dot(v); } +/* *******************************************************************************/ +double Unit::logDetR() const { return 0.0; } + /* ************************************************************************* */ // Robust /* ************************************************************************* */ diff --git a/gtsam/linear/NoiseModel.h b/gtsam/linear/NoiseModel.h index b8e6c09bc..91c11d410 100644 --- a/gtsam/linear/NoiseModel.h +++ b/gtsam/linear/NoiseModel.h @@ -183,6 +183,8 @@ namespace gtsam { return *sqrt_information_; } + /// Compute the log of |R|. Used for computing log(|Σ|) + virtual double logDetR() const; public: @@ -266,15 +268,15 @@ namespace gtsam { /// Compute covariance matrix virtual Matrix covariance() const; + /// Compute the log of |Σ| + double logDeterminant() const; + /** - * @brief Helper method to compute the normalization constant - * for a Gaussian noise model. 
- * k = 1/log(\sqrt(|2πΣ|)) + * @brief Method to compute the normalization constant + * for a Gaussian noise model k = \sqrt(1/|2πΣ|). + * We compute this in the log-space for numerical accuracy, + * thus returning log(k). * - * We compute this in the log-space for numerical accuracy. - * - * @param noise_model The Gaussian noise model - * whose normalization constant we wish to compute. * @return double */ double logNormalizationConstant() const; @@ -308,11 +310,12 @@ namespace gtsam { */ Vector sigmas_, invsigmas_, precisions_; - protected: - /** constructor to allow for disabling initialization of invsigmas */ Diagonal(const Vector& sigmas); + /// Compute the log of |R|. Used for computing log(|Σ|) + virtual double logDetR() const override; + public: /** constructor - no initializations, for serialization */ Diagonal(); @@ -545,6 +548,9 @@ namespace gtsam { Isotropic(size_t dim, double sigma) : Diagonal(Vector::Constant(dim, sigma)),sigma_(sigma),invsigma_(1.0/sigma) {} + /// Compute the log of |R|. Used for computing log(|Σ|) + virtual double logDetR() const override; + public: /* dummy constructor to allow for serialization */ @@ -608,6 +614,10 @@ namespace gtsam { * Unit: i.i.d. unit-variance noise on all m dimensions. */ class GTSAM_EXPORT Unit : public Isotropic { + protected: + /// Compute the log of |R|. 
Used for computing log(|Σ|) + virtual double logDetR() const override; + public: typedef std::shared_ptr shared_ptr; From 67a8b8fea04541b8994d863457d36cdaaebc0f8d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 20 Sep 2024 04:35:37 -0400 Subject: [PATCH 270/398] comprehensive unit testing --- gtsam/linear/tests/testNoiseModel.cpp | 80 +++++++++++++++++++++++---- 1 file changed, 69 insertions(+), 11 deletions(-) diff --git a/gtsam/linear/tests/testNoiseModel.cpp b/gtsam/linear/tests/testNoiseModel.cpp index b6e5785b6..6aea154ee 100644 --- a/gtsam/linear/tests/testNoiseModel.cpp +++ b/gtsam/linear/tests/testNoiseModel.cpp @@ -807,23 +807,81 @@ TEST(NoiseModel, NonDiagonalGaussian) } } -TEST(NoiseModel, ComputeLogNormalizerConstant) { +TEST(NoiseModel, LogNormalizationConstant1D) { // Very simple 1D noise model, which we can compute by hand. double sigma = 0.1; - auto noise_model = Isotropic::Sigma(1, sigma); - double actual_value = noise_model->logNormalizationConstant(); - // Compute 1/log(sqrt(|2πΣ|)) by hand. + // For expected values, we compute 1/log(sqrt(|2πΣ|)) by hand. 
// = -0.5*(log(2π) + log(Σ)) (since it is 1D) double expected_value = -0.5 * log(2 * M_PI * sigma * sigma); - EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); - // Similar situation in the 3D case + // Gaussian + { + Matrix11 R; + R << 1 / sigma; + auto noise_model = Gaussian::SqrtInformation(R); + double actual_value = noise_model->logNormalizationConstant(); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + } + // Diagonal + { + auto noise_model = Diagonal::Sigmas(Vector1(sigma)); + double actual_value = noise_model->logNormalizationConstant(); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + } + // Isotropic + { + auto noise_model = Isotropic::Sigma(1, sigma); + double actual_value = noise_model->logNormalizationConstant(); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + } + // Unit + { + auto noise_model = Unit::Create(1); + double actual_value = noise_model->logNormalizationConstant(); + double sigma = 1.0; + expected_value = -0.5 * log(2 * M_PI * sigma * sigma); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + } +} + +TEST(NoiseModel, LogNormalizationConstant3D) { + // Simple 3D noise model, which we can compute by hand. + double sigma = 0.1; size_t n = 3; - auto noise_model2 = Isotropic::Sigma(n, sigma); - double actual_value2 = noise_model2->logNormalizationConstant(); - // We multiply by 3 due to the determinant - double expected_value2 = -0.5 * n * log(2 * M_PI * sigma * sigma); - EXPECT_DOUBLES_EQUAL(expected_value2, actual_value2, 1e-9); + // We compute the expected values just like in the LogNormalizationConstant1D + // test, but we multiply by 3 due to the determinant. 
+ double expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma); + + // Gaussian + { + Matrix33 R; + R << 1 / sigma, 2, 3, // + 0, 1 / sigma, 4, // + 0, 0, 1 / sigma; + auto noise_model = Gaussian::SqrtInformation(R); + double actual_value = noise_model->logNormalizationConstant(); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + } + // Diagonal + { + auto noise_model = Diagonal::Sigmas(Vector3(sigma, sigma, sigma)); + double actual_value = noise_model->logNormalizationConstant(); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + } + // Isotropic + { + auto noise_model = Isotropic::Sigma(n, sigma); + double actual_value = noise_model->logNormalizationConstant(); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + } + // Unit + { + auto noise_model = Unit::Create(3); + double actual_value = noise_model->logNormalizationConstant(); + double sigma = 1.0; + expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + } } /* ************************************************************************* */ From 561bdcf9af0f9eb6fe37f1900035c118b7c74d5b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 13:11:05 -0400 Subject: [PATCH 271/398] HybridGaussianConditional inherits from HybridGaussianFactor --- gtsam/hybrid/HybridGaussianConditional.cpp | 14 +++++++++++++- gtsam/hybrid/HybridGaussianConditional.h | 10 +++++----- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index df59637aa..e17fd3afe 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -28,13 +28,25 @@ #include namespace gtsam { +HybridGaussianFactor::FactorValuePairs GetFactorValuePairs( + const HybridGaussianConditional::Conditionals &conditionals) { + auto func = [](const GaussianConditional::shared_ptr &conditional) + -> 
GaussianFactorValuePair { + double value = 0.0; + if (conditional) { // Check if conditional is pruned + value = conditional->logNormalizationConstant(); + } + return {std::dynamic_pointer_cast(conditional), value}; + }; + return HybridGaussianFactor::FactorValuePairs(conditionals, func); +} HybridGaussianConditional::HybridGaussianConditional( const KeyVector &continuousFrontals, const KeyVector &continuousParents, const DiscreteKeys &discreteParents, const HybridGaussianConditional::Conditionals &conditionals) : BaseFactor(CollectKeys(continuousFrontals, continuousParents), - discreteParents), + discreteParents, GetFactorValuePairs(conditionals)), BaseConditional(continuousFrontals.size()), conditionals_(conditionals) { // Calculate logConstant_ as the maximum of the log constants of the diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index eb2bbb937..82cf6ec8a 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -51,13 +51,13 @@ class HybridValues; * @ingroup hybrid */ class GTSAM_EXPORT HybridGaussianConditional - : public HybridFactor, - public Conditional { + : public HybridGaussianFactor, + public Conditional { public: using This = HybridGaussianConditional; - using shared_ptr = std::shared_ptr; - using BaseFactor = HybridFactor; - using BaseConditional = Conditional; + using shared_ptr = std::shared_ptr; + using BaseFactor = HybridGaussianFactor; + using BaseConditional = Conditional; /// typedef for Decision Tree of Gaussian Conditionals using Conditionals = DecisionTree; From 9afbc019f4458b9321947bd6be7a71da43ef30d7 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 14:53:40 -0400 Subject: [PATCH 272/398] rename logConstant_ to logNormalizer_ and add test for error with mode-dependent constants --- gtsam/hybrid/HybridGaussianConditional.cpp | 23 +++++++------ gtsam/hybrid/HybridGaussianConditional.h | 5 +-- 
.../tests/testHybridGaussianConditional.cpp | 33 ++++++++++++++++++- 3 files changed, 48 insertions(+), 13 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index e17fd3afe..e0ae16e82 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -33,8 +33,10 @@ HybridGaussianFactor::FactorValuePairs GetFactorValuePairs( auto func = [](const GaussianConditional::shared_ptr &conditional) -> GaussianFactorValuePair { double value = 0.0; - if (conditional) { // Check if conditional is pruned - value = conditional->logNormalizationConstant(); + // Check if conditional is pruned + if (conditional) { + // Assign log(|2πΣ|) = -2*log(1 / sqrt(|2πΣ|)) + value = -2.0 * conditional->logNormalizationConstant(); } return {std::dynamic_pointer_cast(conditional), value}; }; @@ -49,14 +51,14 @@ HybridGaussianConditional::HybridGaussianConditional( discreteParents, GetFactorValuePairs(conditionals)), BaseConditional(continuousFrontals.size()), conditionals_(conditionals) { - // Calculate logConstant_ as the maximum of the log constants of the + // Calculate logNormalizer_ as the minimum of the log normalizers of the // conditionals, by visiting the decision tree: - logConstant_ = -std::numeric_limits::infinity(); + logNormalizer_ = std::numeric_limits::infinity(); conditionals_.visit( [this](const GaussianConditional::shared_ptr &conditional) { if (conditional) { - this->logConstant_ = std::max( - this->logConstant_, conditional->logNormalizationConstant()); + this->logNormalizer_ = std::min( + this->logNormalizer_, -conditional->logNormalizationConstant()); } }); } @@ -98,7 +100,7 @@ GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree() // First check if conditional has not been pruned if (gc) { const double Cgm_Kgcm = - this->logConstant_ - gc->logNormalizationConstant(); + -gc->logNormalizationConstant() - this->logNormalizer_; // If there is a 
difference in the covariances, we need to account for // that since the error is dependent on the mode. if (Cgm_Kgcm > 0.0) { @@ -169,7 +171,8 @@ void HybridGaussianConditional::print(const std::string &s, std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), "; } std::cout << std::endl - << " logNormalizationConstant: " << logConstant_ << std::endl + << " logNormalizationConstant: " << logNormalizationConstant() + << std::endl << std::endl; conditionals_.print( "", [&](Key k) { return formatter(k); }, @@ -228,7 +231,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( -> GaussianFactorValuePair { const auto likelihood_m = conditional->likelihood(given); const double Cgm_Kgcm = - logConstant_ - conditional->logNormalizationConstant(); + -conditional->logNormalizationConstant() - logNormalizer_; if (Cgm_Kgcm == 0.0) { return {likelihood_m, 0.0}; } else { @@ -342,7 +345,7 @@ double HybridGaussianConditional::conditionalError( // Check if valid pointer if (conditional) { return conditional->error(continuousValues) + // - logConstant_ - conditional->logNormalizationConstant(); + -conditional->logNormalizationConstant() - logNormalizer_; } else { // If not valid, pointer, it means this conditional was pruned, // so we return maximum error. diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 82cf6ec8a..434750bc9 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -64,7 +64,8 @@ class GTSAM_EXPORT HybridGaussianConditional private: Conditionals conditionals_; ///< a decision tree of Gaussian conditionals. - double logConstant_; ///< log of the normalization constant. + double logNormalizer_; ///< log of the normalization constant + ///< (log(\sqrt(|2πΣ|))). 
/** * @brief Convert a HybridGaussianConditional of conditionals into @@ -149,7 +150,7 @@ class GTSAM_EXPORT HybridGaussianConditional /// The log normalization constant is max of the the individual /// log-normalization constants. - double logNormalizationConstant() const override { return logConstant_; } + double logNormalizationConstant() const override { return -logNormalizer_; } /** * Create a likelihood factor for a hybrid Gaussian conditional, diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index 406203db8..70cc55712 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -100,7 +100,7 @@ TEST(HybridGaussianConditional, Error) { auto actual = hybrid_conditional.errorTree(vv); // Check result. - std::vector discrete_keys = {mode}; + DiscreteKeys discrete_keys{mode}; std::vector leaves = {conditionals[0]->error(vv), conditionals[1]->error(vv)}; AlgebraicDecisionTree expected(discrete_keys, leaves); @@ -172,6 +172,37 @@ TEST(HybridGaussianConditional, ContinuousParents) { EXPECT(continuousParentKeys[0] == X(0)); } +/* ************************************************************************* */ +/// Check error with mode dependent constants. +TEST(HybridGaussianConditional, Error2) { + using namespace mode_dependent_constants; + auto actual = mixture.errorTree(vv); + + // Check result. + DiscreteKeys discrete_keys{mode}; + double logNormalizer0 = -conditionals[0]->logNormalizationConstant(); + double logNormalizer1 = -conditionals[1]->logNormalizationConstant(); + double minLogNormalizer = std::min(logNormalizer0, logNormalizer1); + + // Expected error is e(X) + log(|2πΣ|). + // We normalize log(|2πΣ|) with min(logNormalizers) so it is non-negative. 
+ std::vector leaves = { + conditionals[0]->error(vv) + logNormalizer0 - minLogNormalizer, + conditionals[1]->error(vv) + logNormalizer1 - minLogNormalizer}; + AlgebraicDecisionTree expected(discrete_keys, leaves); + + EXPECT(assert_equal(expected, actual, 1e-6)); + + // Check for non-tree version. + for (size_t mode : {0, 1}) { + const HybridValues hv{vv, {{M(0), mode}}}; + EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) - + conditionals[mode]->logNormalizationConstant() - + minLogNormalizer, + mixture.error(hv), 1e-8); + } +} + /* ************************************************************************* */ /// Check that the likelihood is proportional to the conditional density given /// the measurements. From 5c72b5d0bc3d114e3887debd559749a6b0a674b9 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 18:38:46 -0400 Subject: [PATCH 273/398] update naming --- gtsam/hybrid/tests/testHybridGaussianConditional.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index 70cc55712..dde493685 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -176,7 +176,7 @@ TEST(HybridGaussianConditional, ContinuousParents) { /// Check error with mode dependent constants. TEST(HybridGaussianConditional, Error2) { using namespace mode_dependent_constants; - auto actual = mixture.errorTree(vv); + auto actual = hybrid_conditional.errorTree(vv); // Check result. 
DiscreteKeys discrete_keys{mode}; @@ -199,7 +199,7 @@ TEST(HybridGaussianConditional, Error2) { EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) - conditionals[mode]->logNormalizationConstant() - minLogNormalizer, - mixture.error(hv), 1e-8); + hybrid_conditional.error(hv), 1e-8); } } From 97a7121c37e1aa193ff12af9da9f117b3f806d54 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 20 Sep 2024 02:34:40 -0400 Subject: [PATCH 274/398] remove duplicate method --- gtsam/hybrid/HybridGaussianConditional.cpp | 15 --------------- gtsam/hybrid/HybridGaussianConditional.h | 13 +++---------- 2 files changed, 3 insertions(+), 25 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index e0ae16e82..c31a9836f 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -78,21 +78,6 @@ HybridGaussianConditional::HybridGaussianConditional( DiscreteKeys{discreteParent}, Conditionals({discreteParent}, conditionals)) {} -/* *******************************************************************************/ -// TODO(dellaert): This is copy/paste: HybridGaussianConditional should be -// derived from HybridGaussianFactor, no? -GaussianFactorGraphTree HybridGaussianConditional::add( - const GaussianFactorGraphTree &sum) const { - using Y = GaussianFactorGraph; - auto add = [](const Y &graph1, const Y &graph2) { - auto result = graph1; - result.push_back(graph2); - return result; - }; - const auto tree = asGaussianFactorGraphTree(); - return sum.empty() ? 
tree : sum.apply(tree, add); -} - /* *******************************************************************************/ GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree() const { diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 434750bc9..5c3e500bd 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -108,8 +108,9 @@ class GTSAM_EXPORT HybridGaussianConditional const Conditionals &conditionals); /** - * @brief Make a Hybrid Gaussian Conditional from a vector of Gaussian conditionals. - * The DecisionTree-based constructor is preferred over this one. + * @brief Make a Hybrid Gaussian Conditional from a vector of Gaussian + * conditionals. The DecisionTree-based constructor is preferred over this + * one. * * @param continuousFrontals The continuous frontal variables * @param continuousParents The continuous parent variables @@ -233,14 +234,6 @@ class GTSAM_EXPORT HybridGaussianConditional */ void prune(const DecisionTreeFactor &discreteProbs); - /** - * @brief Merge the Gaussian Factor Graphs in `this` and `sum` while - * maintaining the decision tree structure. 
- * - * @param sum Decision Tree of Gaussian Factor Graphs - * @return GaussianFactorGraphTree - */ - GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const; /// @} private: From f3bfe7e1a1e8066fcf0cb6a69eeedbe0334f955d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 20 Sep 2024 10:00:49 -0400 Subject: [PATCH 275/398] consistent naming --- gtsam/hybrid/HybridGaussianConditional.cpp | 14 +++++++------- gtsam/hybrid/HybridGaussianConditional.h | 7 ++++--- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index c31a9836f..b408eea9c 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -51,14 +51,14 @@ HybridGaussianConditional::HybridGaussianConditional( discreteParents, GetFactorValuePairs(conditionals)), BaseConditional(continuousFrontals.size()), conditionals_(conditionals) { - // Calculate logNormalizer_ as the minimum of the log normalizers of the + // Calculate logConstant_ as the minimum of the log normalizers of the // conditionals, by visiting the decision tree: - logNormalizer_ = std::numeric_limits::infinity(); + logConstant_ = std::numeric_limits::infinity(); conditionals_.visit( [this](const GaussianConditional::shared_ptr &conditional) { if (conditional) { - this->logNormalizer_ = std::min( - this->logNormalizer_, -conditional->logNormalizationConstant()); + this->logConstant_ = std::min( + this->logConstant_, -conditional->logNormalizationConstant()); } }); } @@ -85,7 +85,7 @@ GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree() // First check if conditional has not been pruned if (gc) { const double Cgm_Kgcm = - -gc->logNormalizationConstant() - this->logNormalizer_; + -this->logConstant_ - gc->logNormalizationConstant(); // If there is a difference in the covariances, we need to account for // that since the error is dependent on the mode. 
if (Cgm_Kgcm > 0.0) { @@ -216,7 +216,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( -> GaussianFactorValuePair { const auto likelihood_m = conditional->likelihood(given); const double Cgm_Kgcm = - -conditional->logNormalizationConstant() - logNormalizer_; + -logConstant_ - conditional->logNormalizationConstant(); if (Cgm_Kgcm == 0.0) { return {likelihood_m, 0.0}; } else { @@ -330,7 +330,7 @@ double HybridGaussianConditional::conditionalError( // Check if valid pointer if (conditional) { return conditional->error(continuousValues) + // - -conditional->logNormalizationConstant() - logNormalizer_; + -logConstant_ - conditional->logNormalizationConstant(); } else { // If not valid, pointer, it means this conditional was pruned, // so we return maximum error. diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 5c3e500bd..72a999472 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -64,8 +64,9 @@ class GTSAM_EXPORT HybridGaussianConditional private: Conditionals conditionals_; ///< a decision tree of Gaussian conditionals. - double logNormalizer_; ///< log of the normalization constant - ///< (log(\sqrt(|2πΣ|))). + ///< Negative-log of the normalization constant (log(\sqrt(|2πΣ|))). + ///< Take advantage of the neg-log space so everything is a minimization + double logConstant_; /** * @brief Convert a HybridGaussianConditional of conditionals into @@ -151,7 +152,7 @@ class GTSAM_EXPORT HybridGaussianConditional /// The log normalization constant is max of the the individual /// log-normalization constants. 
- double logNormalizationConstant() const override { return -logNormalizer_; } + double logNormalizationConstant() const override { return -logConstant_; } /** * Create a likelihood factor for a hybrid Gaussian conditional, From 245f3e042e6e62154657d0d65edbae5292fd0d83 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 20 Sep 2024 12:20:49 -0400 Subject: [PATCH 276/398] fix conversion --- gtsam/hybrid/HybridGaussianConditional.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index b408eea9c..0b1dc5337 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -35,8 +35,8 @@ HybridGaussianFactor::FactorValuePairs GetFactorValuePairs( double value = 0.0; // Check if conditional is pruned if (conditional) { - // Assign log(|2πΣ|) = -2*log(1 / sqrt(|2πΣ|)) - value = -2.0 * conditional->logNormalizationConstant(); + // Assign log(\sqrt(|2πΣ|)) = -log(1 / sqrt(|2πΣ|)) + value = -conditional->logNormalizationConstant(); } return {std::dynamic_pointer_cast(conditional), value}; }; From cd3c590f32b2047b56f6d843859c39caed3625ad Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 21:16:56 -0400 Subject: [PATCH 277/398] common errorTree method and its use in HybridGaussianFactorGraph --- gtsam/hybrid/HybridConditional.cpp | 16 +++++++++++ gtsam/hybrid/HybridConditional.h | 10 +++++++ gtsam/hybrid/HybridFactor.h | 4 +++ gtsam/hybrid/HybridGaussianConditional.h | 10 +++---- gtsam/hybrid/HybridGaussianFactor.h | 4 +-- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 31 ++++------------------ 6 files changed, 42 insertions(+), 33 deletions(-) diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index 074534b8d..3cb3bba65 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -129,6 +129,22 @@ double HybridConditional::error(const 
HybridValues &values) const { "HybridConditional::error: conditional type not handled"); } +/* ************************************************************************ */ +AlgebraicDecisionTree HybridConditional::errorTree( + const VectorValues &values) const { + if (auto gc = asGaussian()) { + return AlgebraicDecisionTree(gc->error(values)); + } + if (auto gm = asHybrid()) { + return gm->errorTree(values); + } + if (auto dc = asDiscrete()) { + return AlgebraicDecisionTree(0.0); + } + throw std::runtime_error( + "HybridConditional::error: conditional type not handled"); +} + /* ************************************************************************ */ double HybridConditional::logProbability(const HybridValues &values) const { if (auto gc = asGaussian()) { diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index f44ee2bf9..0009d6bd8 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -179,6 +179,16 @@ class GTSAM_EXPORT HybridConditional /// Return the error of the underlying conditional. double error(const HybridValues& values) const override; + /** + * @brief Compute error of the HybridConditional as a tree. + * + * @param continuousValues The continuous VectorValues. + * @return AlgebraicDecisionTree A decision tree with the same keys + * as the conditionals involved, and leaf values as the error. + */ + virtual AlgebraicDecisionTree errorTree( + const VectorValues& values) const override; + /// Return the log-probability (or density) of the underlying conditional. double logProbability(const HybridValues& values) const override; diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index ad29dfdca..fc91e0838 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -136,6 +136,10 @@ class GTSAM_EXPORT HybridFactor : public Factor { /// Return only the continuous keys for this factor. 
const KeyVector &continuousKeys() const { return continuousKeys_; } + /// Virtual class to compute tree of linear errors. + virtual AlgebraicDecisionTree errorTree( + const VectorValues &values) const = 0; + /// @} private: diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 72a999472..5e585acef 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -109,9 +109,9 @@ class GTSAM_EXPORT HybridGaussianConditional const Conditionals &conditionals); /** - * @brief Make a Hybrid Gaussian Conditional from a vector of Gaussian - * conditionals. The DecisionTree-based constructor is preferred over this - * one. + * @brief Make a Hybrid Gaussian Conditional from + * a vector of Gaussian conditionals. + * The DecisionTree-based constructor is preferred over this one. * * @param continuousFrontals The continuous frontal variables * @param continuousParents The continuous parent variables @@ -208,8 +208,8 @@ class GTSAM_EXPORT HybridGaussianConditional * @return AlgebraicDecisionTree A decision tree on the discrete keys * only, with the leaf values as the error for each assignment. */ - AlgebraicDecisionTree errorTree( - const VectorValues &continuousValues) const; + virtual AlgebraicDecisionTree errorTree( + const VectorValues &continuousValues) const override; /** * @brief Compute the logProbability of this hybrid Gaussian conditional. diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index a86714863..8d57ad7da 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -148,8 +148,8 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * @return AlgebraicDecisionTree A decision tree with the same keys * as the factors involved, and leaf values as the error. 
*/ - AlgebraicDecisionTree errorTree( - const VectorValues &continuousValues) const; + virtual AlgebraicDecisionTree errorTree( + const VectorValues &continuousValues) const override; /** * @brief Compute the log-likelihood, including the log-normalizing constant. diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 28a0c446f..0d4e534e1 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -539,36 +539,15 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, AlgebraicDecisionTree HybridGaussianFactorGraph::errorTree( const VectorValues &continuousValues) const { AlgebraicDecisionTree error_tree(0.0); - // Iterate over each factor. for (auto &factor : factors_) { - // TODO(dellaert): just use a virtual method defined in HybridFactor. - AlgebraicDecisionTree factor_error; - - auto f = factor; - if (auto hc = dynamic_pointer_cast(factor)) { - f = hc->inner(); - } - - if (auto hybridGaussianCond = - dynamic_pointer_cast(f)) { - // Compute factor error and add it. - error_tree = error_tree + hybridGaussianCond->errorTree(continuousValues); - } else if (auto gaussian = dynamic_pointer_cast(f)) { - // If continuous only, get the (double) error - // and add it to the error_tree - double error = gaussian->error(continuousValues); - // Add the gaussian factor error to every leaf of the error tree. - error_tree = error_tree.apply( - [error](double leaf_value) { return leaf_value + error; }); - } else if (dynamic_pointer_cast(f)) { - // If factor at `idx` is discrete-only, we skip. 
- continue; - } else { - throwRuntimeError("HybridGaussianFactorGraph::error(VV)", f); + if (auto f = std::dynamic_pointer_cast(factor)) { + error_tree = error_tree + f->errorTree(continuousValues); + } else if (auto f = std::dynamic_pointer_cast(factor)) { + error_tree = + error_tree + AlgebraicDecisionTree(f->error(continuousValues)); } } - return error_tree; } From 9c3d7b0f3b95d6ab38e5e462c004413862939d7f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 20 Sep 2024 11:07:02 -0400 Subject: [PATCH 278/398] implement errorTree for HybridNonlinearFactor --- gtsam/hybrid/HybridNonlinearFactor.h | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 6da846abe..9852602de 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -74,6 +74,13 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor { /// Decision tree of Gaussian factors indexed by discrete keys. Factors factors_; + /// HybridFactor method implementation. Should not be used. 
+ AlgebraicDecisionTree errorTree( + const VectorValues& continuousValues) const override { + throw std::runtime_error( + "HybridNonlinearFactor::error does not take VectorValues."); + } + public: HybridNonlinearFactor() = default; From 8231de2a92997a79d050a8673be4a30ea6a33d32 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 20 Sep 2024 12:55:21 -0400 Subject: [PATCH 279/398] rename tests to match file --- .../tests/testHybridNonlinearFactorGraph.cpp | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 621c8708e..a324585bf 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -51,7 +51,7 @@ using symbol_shorthand::X; * Test that any linearizedFactorGraph gaussian factors are appended to the * existing gaussian factor graph in the hybrid factor graph. */ -TEST(HybridFactorGraph, GaussianFactorGraph) { +TEST(HybridNonlinearFactorGraph, GaussianFactorGraph) { HybridNonlinearFactorGraph fg; // Add a simple prior factor to the nonlinear factor graph @@ -181,7 +181,7 @@ TEST(HybridGaussianFactorGraph, HybridNonlinearFactor) { /***************************************************************************** * Test push_back on HFG makes the correct distinction. */ -TEST(HybridFactorGraph, PushBack) { +TEST(HybridNonlinearFactorGraph, PushBack) { HybridNonlinearFactorGraph fg; auto nonlinearFactor = std::make_shared>(); @@ -240,7 +240,7 @@ TEST(HybridFactorGraph, PushBack) { /**************************************************************************** * Test construction of switching-like hybrid factor graph. 
*/ -TEST(HybridFactorGraph, Switching) { +TEST(HybridNonlinearFactorGraph, Switching) { Switching self(3); EXPECT_LONGS_EQUAL(7, self.nonlinearFactorGraph.size()); @@ -250,7 +250,7 @@ TEST(HybridFactorGraph, Switching) { /**************************************************************************** * Test linearization on a switching-like hybrid factor graph. */ -TEST(HybridFactorGraph, Linearization) { +TEST(HybridNonlinearFactorGraph, Linearization) { Switching self(3); // Linearize here: @@ -263,7 +263,7 @@ TEST(HybridFactorGraph, Linearization) { /**************************************************************************** * Test elimination tree construction */ -TEST(HybridFactorGraph, EliminationTree) { +TEST(HybridNonlinearFactorGraph, EliminationTree) { Switching self(3); // Create ordering. @@ -372,7 +372,7 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) { /**************************************************************************** * Test partial elimination */ -TEST(HybridFactorGraph, Partial_Elimination) { +TEST(HybridNonlinearFactorGraph, Partial_Elimination) { Switching self(3); auto linearizedFactorGraph = self.linearizedFactorGraph; @@ -401,7 +401,8 @@ TEST(HybridFactorGraph, Partial_Elimination) { EXPECT(remainingFactorGraph->at(2)->keys() == KeyVector({M(0), M(1)})); } -TEST(HybridFactorGraph, PrintErrors) { +/* ****************************************************************************/ +TEST(HybridNonlinearFactorGraph, PrintErrors) { Switching self(3); // Get nonlinear factor graph and add linear factors to be holistic @@ -424,7 +425,7 @@ TEST(HybridFactorGraph, PrintErrors) { /**************************************************************************** * Test full elimination */ -TEST(HybridFactorGraph, Full_Elimination) { +TEST(HybridNonlinearFactorGraph, Full_Elimination) { Switching self(3); auto linearizedFactorGraph = self.linearizedFactorGraph; @@ -492,7 +493,7 @@ TEST(HybridFactorGraph, Full_Elimination) { 
/**************************************************************************** * Test printing */ -TEST(HybridFactorGraph, Printing) { +TEST(HybridNonlinearFactorGraph, Printing) { Switching self(3); auto linearizedFactorGraph = self.linearizedFactorGraph; @@ -784,7 +785,7 @@ conditional 2: Hybrid P( x2 | m0 m1) * The issue arises if we eliminate a landmark variable first since it is not * connected to a HybridFactor. */ -TEST(HybridFactorGraph, DefaultDecisionTree) { +TEST(HybridNonlinearFactorGraph, DefaultDecisionTree) { HybridNonlinearFactorGraph fg; // Add a prior on pose x0 at the origin. From 9cbc7540d6bc83a476c1c20b4d3d2e4cb6ad19ee Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 20 Sep 2024 15:28:19 -0400 Subject: [PATCH 280/398] add error(HybridValues) to HybridNonlinearFactorGraph --- gtsam/hybrid/HybridNonlinearFactorGraph.h | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h index 54dc9e93f..5a09f18d4 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.h +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h @@ -86,6 +86,10 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph { */ std::shared_ptr linearize( const Values& continuousValues) const; + + /// Expose error(const HybridValues&) method. 
+ using Base::error; + /// @} }; From c71f0336e2c1a4cdfa873cb08d966eddf273cecf Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 20 Sep 2024 15:30:06 -0400 Subject: [PATCH 281/398] add HybridNonlinearFactorGraph::error test --- .../tests/testHybridNonlinearFactorGraph.cpp | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index a324585bf..347cc5f1f 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -401,6 +401,37 @@ TEST(HybridNonlinearFactorGraph, Partial_Elimination) { EXPECT(remainingFactorGraph->at(2)->keys() == KeyVector({M(0), M(1)})); } +/* ****************************************************************************/ +TEST(HybridNonlinearFactorGraph, Error) { + Switching self(3); + HybridNonlinearFactorGraph fg = self.nonlinearFactorGraph; + + { + HybridValues values(VectorValues(), DiscreteValues{{M(0), 0}, {M(1), 0}}, + self.linearizationPoint); + // regression + EXPECT_DOUBLES_EQUAL(152.791759469, fg.error(values), 1e-9); + } + { + HybridValues values(VectorValues(), DiscreteValues{{M(0), 0}, {M(1), 1}}, + self.linearizationPoint); + // regression + EXPECT_DOUBLES_EQUAL(151.598612289, fg.error(values), 1e-9); + } + { + HybridValues values(VectorValues(), DiscreteValues{{M(0), 1}, {M(1), 0}}, + self.linearizationPoint); + // regression + EXPECT_DOUBLES_EQUAL(151.703972804, fg.error(values), 1e-9); + } + { + HybridValues values(VectorValues(), DiscreteValues{{M(0), 1}, {M(1), 1}}, + self.linearizationPoint); + // regression + EXPECT_DOUBLES_EQUAL(151.609437912, fg.error(values), 1e-9); + } +} + /* ****************************************************************************/ TEST(HybridNonlinearFactorGraph, PrintErrors) { Switching self(3); From d145872916868e6432ea1d574c9167356ab39ab0 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 20 Sep 
2024 15:30:24 -0400 Subject: [PATCH 282/398] comment update --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 0d4e534e1..01ecfe5ac 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -329,8 +329,8 @@ static std::shared_ptr createDiscreteFactor( // Logspace version of: // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); - // We take negative of the logNormalizationConstant `log(1/k)` - // to get `log(k)`. + // We take negative of the logNormalizationConstant `log(k)` + // to get `log(1/k) = log(\sqrt{|2πΣ|})`. return -factor->error(kEmpty) - conditional->logNormalizationConstant(); }; From 939fdcc7201729a2ef686087d6a178ba5cf14ef0 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 20 Sep 2024 16:20:49 -0400 Subject: [PATCH 283/398] HybridGaussianFactorGraph::errorTree is better encompassing --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 01ecfe5ac..a6fe955eb 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -542,10 +542,15 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::errorTree( // Iterate over each factor. 
for (auto &factor : factors_) { if (auto f = std::dynamic_pointer_cast(factor)) { + // Check for HybridFactor, and call errorTree error_tree = error_tree + f->errorTree(continuousValues); - } else if (auto f = std::dynamic_pointer_cast(factor)) { - error_tree = - error_tree + AlgebraicDecisionTree(f->error(continuousValues)); + } else if (auto f = std::dynamic_pointer_cast(factor)) { + // Skip discrete factors + continue; + } else { + // Everything else is a continuous only factor + HybridValues hv(continuousValues, DiscreteValues()); + error_tree = error_tree + AlgebraicDecisionTree(factor->error(hv)); } } return error_tree; From 9567006b6be75ff997e7c9091dd994096087583e Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Fri, 20 Sep 2024 13:45:32 -0700 Subject: [PATCH 284/398] Use 'F' order --- python/CustomFactors.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/python/CustomFactors.md b/python/CustomFactors.md index 39a840e34..0a387bb4f 100644 --- a/python/CustomFactors.md +++ b/python/CustomFactors.md @@ -18,12 +18,14 @@ def error_func(this: gtsam.CustomFactor, v: gtsam.Values, H: List[np.ndarray]) - `this` is a reference to the `CustomFactor` object. This is required because one can reuse the same `error_func` for multiple factors. `v` is a reference to the current set of values, and `H` is a list of **references** to the list of required Jacobians (see the corresponding C++ documentation). Note that -the error returned must be a 1D numpy array. +the error returned must be a 1D `numpy` array. If `H` is `None`, it means the current factor evaluation does not need Jacobians. For example, the `error` method on a factor does not need Jacobians, so we don't evaluate them to save CPU. If `H` is not `None`, each entry of `H` can be assigned a (2D) `numpy` array, as the Jacobian for the corresponding variable. +All `numpy` matrices inside should be using `order="F"` to maintain interoperability with C++. 
+ After defining `error_func`, one can create a `CustomFactor` just like any other factor in GTSAM: ```python From 4b04ae0944f8d8ec0b8bd1f5b4a2eba7e50502ae Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Fri, 20 Sep 2024 18:11:47 -0700 Subject: [PATCH 285/398] Code from Joel --- python/gtsam/utils/numerical_derivative.py | 228 +++++++++++++++++++++ 1 file changed, 228 insertions(+) create mode 100644 python/gtsam/utils/numerical_derivative.py diff --git a/python/gtsam/utils/numerical_derivative.py b/python/gtsam/utils/numerical_derivative.py new file mode 100644 index 000000000..eb59d6a80 --- /dev/null +++ b/python/gtsam/utils/numerical_derivative.py @@ -0,0 +1,228 @@ +""" +GTSAM Copyright 2010-2019, Georgia Tech Research Corporation, +Atlanta, Georgia 30332-0415 +All Rights Reserved + +See LICENSE for the license information + +TestCase class with GTSAM assert utils. +Author: Joel Truher & Frank Dellaert +""" + +# pylint: disable=C0103,C0114,C0116,E0611,R0913 +# mypy: disable-error-code="import-untyped" +# see numericalDerivative.h + +# pybind wants to wrap concrete types, which would have been +# a whole lot of them, so i reimplemented the part of this that +# I needed, using the python approach to "generic" typing. 
+ +from typing import Callable, TypeVar +import numpy as np + +Y = TypeVar("Y") +X = TypeVar("X") +X1 = TypeVar("X1") +X2 = TypeVar("X2") +X3 = TypeVar("X3") +X4 = TypeVar("X4") +X5 = TypeVar("X5") +X6 = TypeVar("X6") + + +def local(a: Y, b: Y) -> np.ndarray: + if type(a) is not type(b): + raise TypeError(f"a {type(a)} b {type(b)}") + if isinstance(a, np.ndarray): + return b - a + if isinstance(a, (float, int)): + return np.array([[b - a]]) # type:ignore + # there is no common superclass for Y + return a.localCoordinates(b) # type:ignore + + +def retract(a, b): + if isinstance(a, (np.ndarray, float, int)): + return a + b + return a.retract(b) + + +def numericalDerivative11(h: Callable[[X], Y], x: X, delta=1e-5) -> np.ndarray: + hx: Y = h(x) + zeroY = local(hx, hx) + m = zeroY.shape[0] + zeroX = local(x, x) + N = zeroX.shape[0] + dx = np.zeros(N) + H = np.zeros((m, N)) + factor: float = 1.0 / (2.0 * delta) + for j in range(N): + dx[j] = delta + dy1 = local(hx, h(retract(x, dx))) + dx[j] = -delta + dy2 = local(hx, h(retract(x, dx))) + dx[j] = 0 + H[:, j] = (dy1 - dy2) * factor + return H + + +def numericalDerivative21( + h: Callable[[X1, X2], Y], x1: X1, x2: X2, delta=1e-5 +) -> np.array: + return numericalDerivative11(lambda x: h(x, x2), x1, delta) + + +def numericalDerivative22( + h: Callable[[X1, X2], Y], x1: X1, x2: X2, delta=1e-5 +) -> np.array: + return numericalDerivative11(lambda x: h(x1, x), x2, delta) + + +def numericalDerivative31( + h: Callable[[X1, X2, X3], Y], x1: X1, x2: X2, x3: X3, delta=1e-5 +) -> np.array: + return numericalDerivative11(lambda x: h(x, x2, x3), x1, delta) + + +def numericalDerivative32( + h: Callable[[X1, X2, X3], Y], x1: X1, x2: X2, x3: X3, delta=1e-5 +) -> np.array: + return numericalDerivative11(lambda x: h(x1, x, x3), x2, delta) + + +def numericalDerivative33( + h: Callable[[X1, X2, X3], Y], x1: X1, x2: X2, x3: X3, delta=1e-5 +) -> np.array: + return numericalDerivative11(lambda x: h(x1, x2, x), x3, delta) + + +def 
numericalDerivative41( + h: Callable[[X1, X2, X3, X4], Y], x1: X1, x2: X2, x3: X3, x4: X4, delta=1e-5 +) -> np.array: + return numericalDerivative11(lambda x: h(x, x2, x3, x4), x1, delta) + + +def numericalDerivative42( + h: Callable[[X1, X2, X3, X4], Y], x1: X1, x2: X2, x3: X3, x4: X4, delta=1e-5 +) -> np.array: + return numericalDerivative11(lambda x: h(x1, x, x3, x4), x2, delta) + + +def numericalDerivative43( + h: Callable[[X1, X2, X3, X4], Y], x1: X1, x2: X2, x3: X3, x4: X4, delta=1e-5 +) -> np.array: + return numericalDerivative11(lambda x: h(x1, x2, x, x4), x3, delta) + + +def numericalDerivative44( + h: Callable[[X1, X2, X3, X4], Y], x1: X1, x2: X2, x3: X3, x4: X4, delta=1e-5 +) -> np.array: + return numericalDerivative11(lambda x: h(x1, x2, x3, x), x4, delta) + + +def numericalDerivative51( + h: Callable[[X1, X2, X3, X4, X5], Y], x1: X1, x2: X2, x3: X3, x4: X4, x5: X5, delta=1e-5 +) -> np.array: + return numericalDerivative11(lambda x: h(x, x2, x3, x4, x5), x1, delta) + + +def numericalDerivative52( + h: Callable[[X1, X2, X3, X4, X5], Y], x1: X1, x2: X2, x3: X3, x4: X4, x5: X5, delta=1e-5 +) -> np.array: + return numericalDerivative11(lambda x: h(x1, x, x3, x4, x5), x2, delta) + + +def numericalDerivative53( + h: Callable[[X1, X2, X3, X4, X5], Y], x1: X1, x2: X2, x3: X3, x4: X4, x5: X5, delta=1e-5 +) -> np.array: + return numericalDerivative11(lambda x: h(x1, x2, x, x4, x5), x3, delta) + + +def numericalDerivative54( + h: Callable[[X1, X2, X3, X4, X5], Y], x1: X1, x2: X2, x3: X3, x4: X4, x5: X5, delta=1e-5 +) -> np.array: + return numericalDerivative11(lambda x: h(x1, x2, x3, x, x5), x4, delta) + + +def numericalDerivative55( + h: Callable[[X1, X2, X3, X4, X5], Y], x1: X1, x2: X2, x3: X3, x4: X4, x5: X5, delta=1e-5 +) -> np.array: + return numericalDerivative11(lambda x: h(x1, x2, x3, x4, x), x5, delta) + + +def numericalDerivative61( + h: Callable[[X1, X2, X3, X4, X5, X6], Y], + x1: X1, + x2: X2, + x3: X3, + x4: X4, + x5: X5, + x6: X6, + delta=1e-5, +) -> 
np.array: + return numericalDerivative11(lambda x: h(x, x2, x3, x4, x5, x6), x1, delta) + + +def numericalDerivative62( + h: Callable[[X1, X2, X3, X4, X5, X6], Y], + x1: X1, + x2: X2, + x3: X3, + x4: X4, + x5: X5, + x6: X6, + delta=1e-5, +) -> np.array: + return numericalDerivative11(lambda x: h(x1, x, x3, x4, x5, x6), x2, delta) + + +def numericalDerivative63( + h: Callable[[X1, X2, X3, X4, X5, X6], Y], + x1: X1, + x2: X2, + x3: X3, + x4: X4, + x5: X5, + x6: X6, + delta=1e-5, +) -> np.array: + return numericalDerivative11(lambda x: h(x1, x2, x, x4, x5, x6), x3, delta) + + +def numericalDerivative64( + h: Callable[[X1, X2, X3, X4, X5, X6], Y], + x1: X1, + x2: X2, + x3: X3, + x4: X4, + x5: X5, + x6: X6, + delta=1e-5, +) -> np.array: + return numericalDerivative11(lambda x: h(x1, x2, x3, x, x5, x6), x4, delta) + + +def numericalDerivative65( + h: Callable[[X1, X2, X3, X4, X5, X6], Y], + x1: X1, + x2: X2, + x3: X3, + x4: X4, + x5: X5, + x6: X6, + delta=1e-5, +) -> np.array: + return numericalDerivative11(lambda x: h(x1, x2, x3, x4, x, x6), x5, delta) + + +def numericalDerivative66( + h: Callable[[X1, X2, X3, X4, X5, X6], Y], + x1: X1, + x2: X2, + x3: X3, + x4: X4, + x5: X5, + x6: X6, + delta=1e-5, +) -> np.array: + return numericalDerivative11(lambda x: h(x1, x2, x3, x4, x5, x), x6, delta) \ No newline at end of file From 9dbab04a32575d5b3468e36c9e65c85f68a358f3 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Fri, 20 Sep 2024 18:12:06 -0700 Subject: [PATCH 286/398] Tests with some help from chatgpt --- .../gtsam/tests/test_numerical_derivative.py | 125 ++++++++++++++++++ 1 file changed, 125 insertions(+) create mode 100644 python/gtsam/tests/test_numerical_derivative.py diff --git a/python/gtsam/tests/test_numerical_derivative.py b/python/gtsam/tests/test_numerical_derivative.py new file mode 100644 index 000000000..9bbdec277 --- /dev/null +++ b/python/gtsam/tests/test_numerical_derivative.py @@ -0,0 +1,125 @@ +""" +GTSAM Copyright 2010-2019, Georgia Tech Research 
Corporation, +Atlanta, Georgia 30332-0415 +All Rights Reserved + +See LICENSE for the license information + +Unit tests for IMU testing scenarios. +Author: Frank Dellaert & Joel Truher +""" +# pylint: disable=invalid-name, no-name-in-module + +import unittest +import numpy as np + +from gtsam import Pose3, Rot3, Point3 +from gtsam.utils.numerical_derivative import numericalDerivative11, numericalDerivative21, numericalDerivative22, numericalDerivative33 + + +class TestNumericalDerivatives(unittest.TestCase): + def test_numericalDerivative11_scalar(self): + # Test function of one variable + def h(x): + return x ** 2 + + x = np.array([3.0]) + # Analytical derivative: dh/dx = 2x + analytical_derivative = np.array([[2.0 * x[0]]]) + + # Compute numerical derivative + numerical_derivative = numericalDerivative11(h, x) + + # Check if numerical derivative is close to analytical derivative + np.testing.assert_allclose( + numerical_derivative, analytical_derivative, rtol=1e-5 + ) + + def test_numericalDerivative11_vector(self): + # Test function of one vector variable + def h(x): + return x ** 2 + + x = np.array([1.0, 2.0, 3.0]) + # Analytical derivative: dh/dx = 2x + analytical_derivative = np.diag(2.0 * x) + + numerical_derivative = numericalDerivative11(h, x) + + np.testing.assert_allclose( + numerical_derivative, analytical_derivative, rtol=1e-5 + ) + + def test_numericalDerivative21(self): + # Test function of two variables, derivative with respect to first variable + def h(x1, x2): + return x1 * np.sin(x2) + + x1 = np.array([2.0]) + x2 = np.array([np.pi / 4]) + # Analytical derivative: dh/dx1 = sin(x2) + analytical_derivative = np.array([[np.sin(x2[0])]]) + + numerical_derivative = numericalDerivative21(h, x1, x2) + + np.testing.assert_allclose( + numerical_derivative, analytical_derivative, rtol=1e-5 + ) + + def test_numericalDerivative22(self): + # Test function of two variables, derivative with respect to second variable + def h(x1, x2): + return x1 * np.sin(x2) + + 
x1 = np.array([2.0]) + x2 = np.array([np.pi / 4]) + # Analytical derivative: dh/dx2 = x1 * cos(x2) + analytical_derivative = np.array([[x1[0] * np.cos(x2[0])]]) + + numerical_derivative = numericalDerivative22(h, x1, x2) + + np.testing.assert_allclose( + numerical_derivative, analytical_derivative, rtol=1e-5 + ) + + def test_numericalDerivative33(self): + # Test function of three variables, derivative with respect to third variable + def h(x1, x2, x3): + return x1 * x2 + np.exp(x3) + + x1 = np.array([1.0]) + x2 = np.array([2.0]) + x3 = np.array([0.5]) + # Analytical derivative: dh/dx3 = exp(x3) + analytical_derivative = np.array([[np.exp(x3[0])]]) + + numerical_derivative = numericalDerivative33(h, x1, x2, x3) + + np.testing.assert_allclose( + numerical_derivative, analytical_derivative, rtol=1e-5 + ) + + def test_numericalDerivative_with_pose(self): + # Test function with manifold and vector inputs + + def h(pose:Pose3, point:np.ndarray): + return pose.transformFrom(point) + + # Values from testPose3.cpp + P = Point3(0.2,0.7,-2) + R = Rot3.Rodrigues(0.3,0,0) + P2 = Point3(3.5,-8.2,4.2) + T = Pose3(R,P2) + + analytic_H1 = np.zeros((3,6), order='F', dtype=float) + analytic_H2 = np.zeros((3,3), order='F', dtype=float) + y = T.transformFrom(P, analytic_H1, analytic_H2) + + numerical_H1 = numericalDerivative21(h, T, P) + numerical_H2 = numericalDerivative22(h, T, P) + + np.testing.assert_allclose(numerical_H1, analytic_H1, rtol=1e-5) + np.testing.assert_allclose(numerical_H2, analytic_H2, rtol=1e-5) + +if __name__ == "__main__": + unittest.main() \ No newline at end of file From d0084a97c3e7d8aecf71f95c11467f1d9b6e1001 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Fri, 20 Sep 2024 18:12:24 -0700 Subject: [PATCH 287/398] Use new numdiff functions --- python/gtsam/tests/test_Pose3.py | 72 +++++--------------------------- 1 file changed, 10 insertions(+), 62 deletions(-) diff --git a/python/gtsam/tests/test_Pose3.py b/python/gtsam/tests/test_Pose3.py index 
32b3ad47f..86c5bc9a4 100644 --- a/python/gtsam/tests/test_Pose3.py +++ b/python/gtsam/tests/test_Pose3.py @@ -17,63 +17,7 @@ from gtsam.utils.test_case import GtsamTestCase import gtsam from gtsam import Point3, Pose3, Rot3 - - -def numerical_derivative_pose(pose, method, delta=1e-5): - jacobian = np.zeros((6, 6)) - for idx in range(6): - xplus = np.zeros(6) - xplus[idx] = delta - xminus = np.zeros(6) - xminus[idx] = -delta - pose_plus = pose.retract(xplus).__getattribute__(method)() - pose_minus = pose.retract(xminus).__getattribute__(method)() - jacobian[:, idx] = pose_minus.localCoordinates(pose_plus) / (2 * delta) - return jacobian - - -def numerical_derivative_2_poses(pose, other_pose, method, delta=1e-5, inputs=()): - jacobian = np.zeros((6, 6)) - other_jacobian = np.zeros((6, 6)) - for idx in range(6): - xplus = np.zeros(6) - xplus[idx] = delta - xminus = np.zeros(6) - xminus[idx] = -delta - - pose_plus = pose.retract(xplus).__getattribute__(method)(*inputs, other_pose) - pose_minus = pose.retract(xminus).__getattribute__(method)(*inputs, other_pose) - jacobian[:, idx] = pose_minus.localCoordinates(pose_plus) / (2 * delta) - - other_pose_plus = pose.__getattribute__(method)(*inputs, other_pose.retract(xplus)) - other_pose_minus = pose.__getattribute__(method)(*inputs, other_pose.retract(xminus)) - other_jacobian[:, idx] = other_pose_minus.localCoordinates(other_pose_plus) / (2 * delta) - return jacobian, other_jacobian - - -def numerical_derivative_pose_point(pose, point, method, delta=1e-5): - jacobian = np.zeros((3, 6)) - point_jacobian = np.zeros((3, 3)) - for idx in range(6): - xplus = np.zeros(6) - xplus[idx] = delta - xminus = np.zeros(6) - xminus[idx] = -delta - - point_plus = pose.retract(xplus).__getattribute__(method)(point) - point_minus = pose.retract(xminus).__getattribute__(method)(point) - jacobian[:, idx] = (point_plus - point_minus) / (2 * delta) - - if idx < 3: - xplus = np.zeros(3) - xplus[idx] = delta - xminus = np.zeros(3) - xminus[idx] 
= -delta - point_plus = pose.__getattribute__(method)(point + xplus) - point_minus = pose.__getattribute__(method)(point + xminus) - point_jacobian[:, idx] = (point_plus - point_minus) / (2 * delta) - return jacobian, point_jacobian - +from gtsam.utils.numerical_derivative import numericalDerivative11, numericalDerivative21, numericalDerivative22 class TestPose3(GtsamTestCase): """Test selected Pose3 methods.""" @@ -90,7 +34,8 @@ class TestPose3(GtsamTestCase): jacobian = np.zeros((6, 6), order='F') jacobian_other = np.zeros((6, 6), order='F') T2.between(T3, jacobian, jacobian_other) - jacobian_numerical, jacobian_numerical_other = numerical_derivative_2_poses(T2, T3, 'between') + jacobian_numerical = numericalDerivative21(Pose3.between, T2, T3) + jacobian_numerical_other = numericalDerivative22(Pose3.between, T2, T3) self.gtsamAssertEquals(jacobian, jacobian_numerical) self.gtsamAssertEquals(jacobian_other, jacobian_numerical_other) @@ -104,7 +49,7 @@ class TestPose3(GtsamTestCase): #test jacobians jacobian = np.zeros((6, 6), order='F') pose.inverse(jacobian) - jacobian_numerical = numerical_derivative_pose(pose, 'inverse') + jacobian_numerical = numericalDerivative11(Pose3.inverse, pose) self.gtsamAssertEquals(jacobian, jacobian_numerical) def test_slerp(self): @@ -123,7 +68,8 @@ class TestPose3(GtsamTestCase): jacobian = np.zeros((6, 6), order='F') jacobian_other = np.zeros((6, 6), order='F') pose0.slerp(0.5, pose1, jacobian, jacobian_other) - jacobian_numerical, jacobian_numerical_other = numerical_derivative_2_poses(pose0, pose1, 'slerp', inputs=[0.5]) + jacobian_numerical = numericalDerivative11(lambda x: x.slerp(0.5, pose1), pose0) + jacobian_numerical_other = numericalDerivative11(lambda x: pose0.slerp(0.5, x), pose1) self.gtsamAssertEquals(jacobian, jacobian_numerical) self.gtsamAssertEquals(jacobian_other, jacobian_numerical_other) @@ -139,7 +85,8 @@ class TestPose3(GtsamTestCase): jacobian_pose = np.zeros((3, 6), order='F') jacobian_point = np.zeros((3, 
3), order='F') pose.transformTo(point, jacobian_pose, jacobian_point) - jacobian_numerical_pose, jacobian_numerical_point = numerical_derivative_pose_point(pose, point, 'transformTo') + jacobian_numerical_pose = numericalDerivative21(Pose3.transformTo, pose, point) + jacobian_numerical_point = numericalDerivative22(Pose3.transformTo, pose, point) self.gtsamAssertEquals(jacobian_pose, jacobian_numerical_pose) self.gtsamAssertEquals(jacobian_point, jacobian_numerical_point) @@ -162,7 +109,8 @@ class TestPose3(GtsamTestCase): jacobian_pose = np.zeros((3, 6), order='F') jacobian_point = np.zeros((3, 3), order='F') pose.transformFrom(point, jacobian_pose, jacobian_point) - jacobian_numerical_pose, jacobian_numerical_point = numerical_derivative_pose_point(pose, point, 'transformFrom') + jacobian_numerical_pose = numericalDerivative21(Pose3.transformFrom, pose, point) + jacobian_numerical_point = numericalDerivative22(Pose3.transformFrom, pose, point) self.gtsamAssertEquals(jacobian_pose, jacobian_numerical_pose) self.gtsamAssertEquals(jacobian_point, jacobian_numerical_point) From 5c80174c0be0e527bd64d779c57c2504461ea04f Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Fri, 20 Sep 2024 22:52:58 -0700 Subject: [PATCH 288/398] Fix small issues --- .../gtsam/tests/test_numerical_derivative.py | 2 +- python/gtsam/utils/numerical_derivative.py | 52 +++++++++---------- 2 files changed, 27 insertions(+), 27 deletions(-) diff --git a/python/gtsam/tests/test_numerical_derivative.py b/python/gtsam/tests/test_numerical_derivative.py index 9bbdec277..47d259585 100644 --- a/python/gtsam/tests/test_numerical_derivative.py +++ b/python/gtsam/tests/test_numerical_derivative.py @@ -5,7 +5,7 @@ All Rights Reserved See LICENSE for the license information -Unit tests for IMU testing scenarios. +Unit tests for IMU numerical_derivative module. 
Author: Frank Dellaert & Joel Truher """ # pylint: disable=invalid-name, no-name-in-module diff --git a/python/gtsam/utils/numerical_derivative.py b/python/gtsam/utils/numerical_derivative.py index eb59d6a80..3b52f5a5f 100644 --- a/python/gtsam/utils/numerical_derivative.py +++ b/python/gtsam/utils/numerical_derivative.py @@ -5,7 +5,7 @@ All Rights Reserved See LICENSE for the license information -TestCase class with GTSAM assert utils. +Numerical derivative functions. Author: Joel Truher & Frank Dellaert """ @@ -36,15 +36,15 @@ def local(a: Y, b: Y) -> np.ndarray: if isinstance(a, np.ndarray): return b - a if isinstance(a, (float, int)): - return np.array([[b - a]]) # type:ignore + return np.ndarray([[b - a]]) # type:ignore # there is no common superclass for Y return a.localCoordinates(b) # type:ignore -def retract(a, b): +def retract(a, xi: np.ndarray): if isinstance(a, (np.ndarray, float, int)): - return a + b - return a.retract(b) + return a + xi + return a.retract(xi) def numericalDerivative11(h: Callable[[X], Y], x: X, delta=1e-5) -> np.ndarray: @@ -68,85 +68,85 @@ def numericalDerivative11(h: Callable[[X], Y], x: X, delta=1e-5) -> np.ndarray: def numericalDerivative21( h: Callable[[X1, X2], Y], x1: X1, x2: X2, delta=1e-5 -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x, x2), x1, delta) def numericalDerivative22( h: Callable[[X1, X2], Y], x1: X1, x2: X2, delta=1e-5 -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x1, x), x2, delta) def numericalDerivative31( h: Callable[[X1, X2, X3], Y], x1: X1, x2: X2, x3: X3, delta=1e-5 -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x, x2, x3), x1, delta) def numericalDerivative32( h: Callable[[X1, X2, X3], Y], x1: X1, x2: X2, x3: X3, delta=1e-5 -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x1, x, x3), x2, delta) def numericalDerivative33( h: Callable[[X1, X2, X3], Y], x1: X1, x2: X2, x3: X3, delta=1e-5 -) -> 
np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x1, x2, x), x3, delta) def numericalDerivative41( h: Callable[[X1, X2, X3, X4], Y], x1: X1, x2: X2, x3: X3, x4: X4, delta=1e-5 -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x, x2, x3, x4), x1, delta) def numericalDerivative42( h: Callable[[X1, X2, X3, X4], Y], x1: X1, x2: X2, x3: X3, x4: X4, delta=1e-5 -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x1, x, x3, x4), x2, delta) def numericalDerivative43( h: Callable[[X1, X2, X3, X4], Y], x1: X1, x2: X2, x3: X3, x4: X4, delta=1e-5 -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x1, x2, x, x4), x3, delta) def numericalDerivative44( h: Callable[[X1, X2, X3, X4], Y], x1: X1, x2: X2, x3: X3, x4: X4, delta=1e-5 -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x1, x2, x3, x), x4, delta) def numericalDerivative51( h: Callable[[X1, X2, X3, X4, X5], Y], x1: X1, x2: X2, x3: X3, x4: X4, x5: X5, delta=1e-5 -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x, x2, x3, x4, x5), x1, delta) def numericalDerivative52( h: Callable[[X1, X2, X3, X4, X5], Y], x1: X1, x2: X2, x3: X3, x4: X4, x5: X5, delta=1e-5 -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x1, x, x3, x4, x5), x2, delta) def numericalDerivative53( h: Callable[[X1, X2, X3, X4, X5], Y], x1: X1, x2: X2, x3: X3, x4: X4, x5: X5, delta=1e-5 -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x1, x2, x, x4, x5), x3, delta) def numericalDerivative54( h: Callable[[X1, X2, X3, X4, X5], Y], x1: X1, x2: X2, x3: X3, x4: X4, x5: X5, delta=1e-5 -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x1, x2, x3, x, x5), x4, delta) def numericalDerivative55( h: Callable[[X1, X2, X3, X4, X5], Y], x1: X1, x2: X2, x3: X3, x4: X4, x5: X5, delta=1e-5 -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x1, 
x2, x3, x4, x), x5, delta) @@ -159,7 +159,7 @@ def numericalDerivative61( x5: X5, x6: X6, delta=1e-5, -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x, x2, x3, x4, x5, x6), x1, delta) @@ -172,7 +172,7 @@ def numericalDerivative62( x5: X5, x6: X6, delta=1e-5, -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x1, x, x3, x4, x5, x6), x2, delta) @@ -185,7 +185,7 @@ def numericalDerivative63( x5: X5, x6: X6, delta=1e-5, -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x1, x2, x, x4, x5, x6), x3, delta) @@ -198,7 +198,7 @@ def numericalDerivative64( x5: X5, x6: X6, delta=1e-5, -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x1, x2, x3, x, x5, x6), x4, delta) @@ -211,7 +211,7 @@ def numericalDerivative65( x5: X5, x6: X6, delta=1e-5, -) -> np.array: +) -> np.ndarray: return numericalDerivative11(lambda x: h(x1, x2, x3, x4, x, x6), x5, delta) @@ -224,5 +224,5 @@ def numericalDerivative66( x5: X5, x6: X6, delta=1e-5, -) -> np.array: - return numericalDerivative11(lambda x: h(x1, x2, x3, x4, x5, x), x6, delta) \ No newline at end of file +) -> np.ndarray: + return numericalDerivative11(lambda x: h(x1, x2, x3, x4, x5, x), x6, delta) From 4c8224800424acda8e673cb018fb8a5a400d2d5a Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 21 Sep 2024 03:09:00 -0400 Subject: [PATCH 289/398] remove virtual --- gtsam/hybrid/HybridConditional.h | 2 +- gtsam/hybrid/HybridGaussianFactor.h | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index 0009d6bd8..51eeeb5bb 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -186,7 +186,7 @@ class GTSAM_EXPORT HybridConditional * @return AlgebraicDecisionTree A decision tree with the same keys * as the conditionals involved, and leaf values as the error. 
*/ - virtual AlgebraicDecisionTree errorTree( + AlgebraicDecisionTree errorTree( const VectorValues& values) const override; /// Return the log-probability (or density) of the underlying conditional. diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 8d57ad7da..3bf9f3dfd 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -148,7 +148,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * @return AlgebraicDecisionTree A decision tree with the same keys * as the factors involved, and leaf values as the error. */ - virtual AlgebraicDecisionTree errorTree( + AlgebraicDecisionTree errorTree( const VectorValues &continuousValues) const override; /** From d81cd82b9a6e27e51104faf00c9915656a670799 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 21 Sep 2024 03:24:01 -0400 Subject: [PATCH 290/398] check for potentiall pruning --- gtsam/hybrid/HybridGaussianFactor.cpp | 21 ++++++++++++++++++--- gtsam/hybrid/HybridGaussianFactor.h | 4 ++++ 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 2fbd4bd88..a4e0bf874 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -151,12 +151,26 @@ GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree() return {factors_, wrap}; } +/* *******************************************************************************/ +double HybridGaussianFactor::potentiallyPrunedComponentError( + const sharedFactor &gf, const VectorValues &values) const { + // Check if valid pointer + if (gf) { + return gf->error(values); + } else { + // If not valid, pointer, it means this component was pruned, + // so we return maximum error. + // This way the negative exponential will give + // a probability value close to 0.0. 
+ return std::numeric_limits::max(); + } +} /* *******************************************************************************/ AlgebraicDecisionTree HybridGaussianFactor::errorTree( const VectorValues &continuousValues) const { // functor to convert from sharedFactor to double error value. - auto errorFunc = [&continuousValues](const sharedFactor &gf) { - return gf->error(continuousValues); + auto errorFunc = [this, &continuousValues](const sharedFactor &gf) { + return this->potentiallyPrunedComponentError(gf, continuousValues); }; DecisionTree error_tree(factors_, errorFunc); return error_tree; @@ -164,8 +178,9 @@ AlgebraicDecisionTree HybridGaussianFactor::errorTree( /* *******************************************************************************/ double HybridGaussianFactor::error(const HybridValues &values) const { + // Directly index to get the component, no need to build the whole tree. const sharedFactor gf = factors_(values.discrete()); - return gf->error(values.continuous()); + return potentiallyPrunedComponentError(gf, values.continuous()); } } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 3bf9f3dfd..b1b93dc32 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -169,6 +169,10 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// @} private: + /// Helper method to compute the error of a component. 
+ double potentiallyPrunedComponentError( + const sharedFactor &gf, const VectorValues &continuousValues) const; + #ifdef GTSAM_ENABLE_BOOST_SERIALIZATION /** Serialization function */ friend class boost::serialization::access; From 821b22f6f86292d0b1039285d9ae941f803b3b54 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 21 Sep 2024 03:24:29 -0400 Subject: [PATCH 291/398] remove unnecessary code in child class --- gtsam/hybrid/HybridGaussianConditional.cpp | 34 ------------------ gtsam/hybrid/HybridGaussianConditional.h | 41 ---------------------- 2 files changed, 75 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 0b1dc5337..fb943366c 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -323,40 +323,6 @@ AlgebraicDecisionTree HybridGaussianConditional::logProbability( return DecisionTree(conditionals_, probFunc); } -/* ************************************************************************* */ -double HybridGaussianConditional::conditionalError( - const GaussianConditional::shared_ptr &conditional, - const VectorValues &continuousValues) const { - // Check if valid pointer - if (conditional) { - return conditional->error(continuousValues) + // - -logConstant_ - conditional->logNormalizationConstant(); - } else { - // If not valid, pointer, it means this conditional was pruned, - // so we return maximum error. - // This way the negative exponential will give - // a probability value close to 0.0. 
- return std::numeric_limits::max(); - } -} - -/* *******************************************************************************/ -AlgebraicDecisionTree HybridGaussianConditional::errorTree( - const VectorValues &continuousValues) const { - auto errorFunc = [&](const GaussianConditional::shared_ptr &conditional) { - return conditionalError(conditional, continuousValues); - }; - DecisionTree error_tree(conditionals_, errorFunc); - return error_tree; -} - -/* *******************************************************************************/ -double HybridGaussianConditional::error(const HybridValues &values) const { - // Directly index to get the conditional, no need to build the whole tree. - auto conditional = conditionals_(values.discrete()); - return conditionalError(conditional, values.continuous()); -} - /* *******************************************************************************/ double HybridGaussianConditional::logProbability( const HybridValues &values) const { diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 5e585acef..4a5fdcc89 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -174,43 +174,6 @@ class GTSAM_EXPORT HybridGaussianConditional AlgebraicDecisionTree logProbability( const VectorValues &continuousValues) const; - /** - * @brief Compute the error of this hybrid Gaussian conditional. - * - * This requires some care, as different components may have - * different normalization constants. Let's consider p(x|y,m), where m is - * discrete. We need the error to satisfy the invariant: - * - * error(x;y,m) = K - log(probability(x;y,m)) - * - * For all x,y,m. But note that K, the (log) normalization constant defined - * in Conditional.h, should not depend on x, y, or m, only on the parameters - * of the density. 
Hence, we delegate to the underlying Gaussian - * conditionals, indexed by m, which do satisfy: - * - * log(probability_m(x;y)) = K_m - error_m(x;y) - * - * We resolve by having K == max(K_m) and - * - * error(x;y,m) = error_m(x;y) + K - K_m - * - * which also makes error(x;y,m) >= 0 for all x,y,m. - * - * @param values Continuous values and discrete assignment. - * @return double - */ - double error(const HybridValues &values) const override; - - /** - * @brief Compute error of the HybridGaussianConditional as a tree. - * - * @param continuousValues The continuous VectorValues. - * @return AlgebraicDecisionTree A decision tree on the discrete keys - * only, with the leaf values as the error for each assignment. - */ - virtual AlgebraicDecisionTree errorTree( - const VectorValues &continuousValues) const override; - /** * @brief Compute the logProbability of this hybrid Gaussian conditional. * @@ -241,10 +204,6 @@ class GTSAM_EXPORT HybridGaussianConditional /// Check whether `given` has values for all frontal keys. bool allFrontalsGiven(const VectorValues &given) const; - /// Helper method to compute the error of a conditional. 
- double conditionalError(const GaussianConditional::shared_ptr &conditional, - const VectorValues &continuousValues) const; - #ifdef GTSAM_ENABLE_BOOST_SERIALIZATION /** Serialization function */ friend class boost::serialization::access; From ecbf3d872ea467c94110b2f822d9e515e6757df6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 21 Sep 2024 05:15:35 -0400 Subject: [PATCH 292/398] make logNormalizationConstant return -log(k) --- gtsam/inference/Conditional-inst.h | 2 +- gtsam/inference/Conditional.h | 12 +++++++----- gtsam/linear/GaussianBayesNet.cpp | 13 +++++++------ gtsam/linear/GaussianConditional.cpp | 16 ++++++++-------- gtsam/linear/GaussianConditional.h | 6 ++++-- gtsam/linear/NoiseModel.cpp | 10 +++++----- gtsam/linear/NoiseModel.h | 4 ++-- gtsam/linear/tests/testGaussianBayesNet.cpp | 4 ++-- gtsam/linear/tests/testGaussianConditional.cpp | 11 ++++++----- gtsam/linear/tests/testGaussianDensity.cpp | 2 +- gtsam/linear/tests/testNoiseModel.cpp | 12 ++++++------ 11 files changed, 49 insertions(+), 43 deletions(-) diff --git a/gtsam/inference/Conditional-inst.h b/gtsam/inference/Conditional-inst.h index 9377e8cc4..b61a5ac22 100644 --- a/gtsam/inference/Conditional-inst.h +++ b/gtsam/inference/Conditional-inst.h @@ -89,7 +89,7 @@ bool Conditional::CheckInvariants( // normalization constant const double error = conditional.error(values); if (error < 0.0) return false; // prob_or_density is negative. 
- const double expected = conditional.logNormalizationConstant() - error; + const double expected = -(conditional.logNormalizationConstant() + error); if (std::abs(logProb - expected) > 1e-9) return false; // logProb is not consistent with error return true; diff --git a/gtsam/inference/Conditional.h b/gtsam/inference/Conditional.h index c76c05ab1..936a078a8 100644 --- a/gtsam/inference/Conditional.h +++ b/gtsam/inference/Conditional.h @@ -34,9 +34,10 @@ namespace gtsam { * `logProbability` is the main methods that need to be implemented in derived * classes. These two methods relate to the `error` method in the factor by: * probability(x) = k exp(-error(x)) - * where k is a normalization constant making \int probability(x) == 1.0, and - * logProbability(x) = K - error(x) - * i.e., K = log(K). The normalization constant K is assumed to *not* depend + * where k is a normalization constant making + * \int probability(x) = \int k exp(-error(x)) == 1.0, and + * logProbability(x) = -(K + error(x)) + * i.e., K = -log(k). The normalization constant k is assumed to *not* depend * on any argument, only (possibly) on the conditional parameters. * This class provides a default logNormalizationConstant() == 0.0. * @@ -163,8 +164,9 @@ namespace gtsam { } /** - * All conditional types need to implement a log normalization constant to - * make it such that error>=0. + * All conditional types need to implement a + * (negative) log normalization constant + * to make it such that error>=0. 
*/ virtual double logNormalizationConstant() const; diff --git a/gtsam/linear/GaussianBayesNet.cpp b/gtsam/linear/GaussianBayesNet.cpp index 861e19cc9..b7f296ea5 100644 --- a/gtsam/linear/GaussianBayesNet.cpp +++ b/gtsam/linear/GaussianBayesNet.cpp @@ -246,14 +246,15 @@ namespace gtsam { double GaussianBayesNet::logNormalizationConstant() const { /* normalization constant = 1.0 / sqrt((2*pi)^n*det(Sigma)) - logConstant = -0.5 * n*log(2*pi) - 0.5 * log det(Sigma) + logConstant = -log(normalizationConstant) + = 0.5 * n*log(2*pi) + 0.5 * log(det(Sigma)) - log det(Sigma)) = -2.0 * logDeterminant() - thus, logConstant = -0.5*n*log(2*pi) + logDeterminant() + log(det(Sigma)) = -2.0 * logDeterminant() + thus, logConstant = 0.5*n*log(2*pi) - logDeterminant() - BayesNet logConstant = sum(-0.5*n_i*log(2*pi) + logDeterminant_i()) - = sum(-0.5*n_i*log(2*pi)) + sum(logDeterminant_i()) - = sum(-0.5*n_i*log(2*pi)) + bn->logDeterminant() + BayesNet logConstant = sum(0.5*n_i*log(2*pi) - logDeterminant_i()) + = sum(0.5*n_i*log(2*pi)) - sum(logDeterminant_i()) + = sum(0.5*n_i*log(2*pi)) - bn->logDeterminant() */ double logNormConst = 0.0; for (const sharedConditional& cg : *this) { diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index 585bbdb33..fc633d04d 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -181,24 +181,24 @@ namespace gtsam { /* ************************************************************************* */ // normalization constant = 1.0 / sqrt((2*pi)^n*det(Sigma)) - // log = - 0.5 * n*log(2*pi) - 0.5 * log det(Sigma) + // neg-log = 0.5 * n*log(2*pi) + 0.5 * log det(Sigma) double GaussianConditional::logNormalizationConstant() const { constexpr double log2pi = 1.8378770664093454835606594728112; size_t n = d().size(); // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1} // log det(Sigma) = -log(det(R'R)) = -2*log(det(R)) // Hence, log det(Sigma)) = -2.0 * logDeterminant() 
- // which gives log = -0.5*n*log(2*pi) - 0.5*(-2.0 * logDeterminant()) - // = -0.5*n*log(2*pi) + (0.5*2.0 * logDeterminant()) - // = -0.5*n*log(2*pi) + logDeterminant() - return -0.5 * n * log2pi + logDeterminant(); + // which gives neg-log = 0.5*n*log(2*pi) + 0.5*(-2.0 * logDeterminant()) + // = 0.5*n*log(2*pi) - (0.5*2.0 * logDeterminant()) + // = 0.5*n*log(2*pi) - logDeterminant() + return 0.5 * n * log2pi - logDeterminant(); } /* ************************************************************************* */ - // density = k exp(-error(x)) - // log = log(k) - error(x) + // density = 1/k exp(-error(x)) + // log = -log(k) - error(x) double GaussianConditional::logProbability(const VectorValues& x) const { - return logNormalizationConstant() - error(x); + return -logNormalizationConstant() - error(x); } double GaussianConditional::logProbability(const HybridValues& x) const { diff --git a/gtsam/linear/GaussianConditional.h b/gtsam/linear/GaussianConditional.h index 420efabca..757687ed8 100644 --- a/gtsam/linear/GaussianConditional.h +++ b/gtsam/linear/GaussianConditional.h @@ -133,8 +133,10 @@ namespace gtsam { /// @{ /** - * normalization constant = 1.0 / sqrt((2*pi)^n*det(Sigma)) - * log = - 0.5 * n*log(2*pi) - 0.5 * log det(Sigma) + * Return normalization constant in negative log space. 
+ * + * normalization constant k = 1.0 / sqrt((2*pi)^n*det(Sigma)) + * -log(k) = 0.5 * n*log(2*pi) + 0.5 * log det(Sigma) */ double logNormalizationConstant() const override; diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index de69fdce9..ea9ec1aec 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -257,13 +257,13 @@ double Gaussian::logDeterminant() const { /* *******************************************************************************/ double Gaussian::logNormalizationConstant() const { // log(det(Sigma)) = -2.0 * logDetR - // which gives log = -0.5*n*log(2*pi) - 0.5*(-2.0 * logDetR()) - // = -0.5*n*log(2*pi) + (0.5*2.0 * logDetR()) - // = -0.5*n*log(2*pi) + logDetR() + // which gives neg-log = 0.5*n*log(2*pi) + 0.5*(-2.0 * logDetR()) + // = 0.5*n*log(2*pi) - (0.5*2.0 * logDetR()) + // = 0.5*n*log(2*pi) - logDetR() size_t n = dim(); constexpr double log2pi = 1.8378770664093454835606594728112; - // Get 1/log(\sqrt(|2pi Sigma|)) = -0.5*log(|2pi Sigma|) - return -0.5 * n * log2pi + logDetR(); + // Get -log(1/\sqrt(|2pi Sigma|)) = 0.5*log(|2pi Sigma|) + return 0.5 * n * log2pi - logDetR(); } diff --git a/gtsam/linear/NoiseModel.h b/gtsam/linear/NoiseModel.h index 91c11d410..31ffd1107 100644 --- a/gtsam/linear/NoiseModel.h +++ b/gtsam/linear/NoiseModel.h @@ -274,8 +274,8 @@ namespace gtsam { /** * @brief Method to compute the normalization constant * for a Gaussian noise model k = \sqrt(1/|2πΣ|). - * We compute this in the log-space for numerical accuracy, - * thus returning log(k). + * We compute this in the negative log-space for numerical accuracy, + * thus returning -log(k). 
* * @return double */ diff --git a/gtsam/linear/tests/testGaussianBayesNet.cpp b/gtsam/linear/tests/testGaussianBayesNet.cpp index 99453ee4e..a186eb2b2 100644 --- a/gtsam/linear/tests/testGaussianBayesNet.cpp +++ b/gtsam/linear/tests/testGaussianBayesNet.cpp @@ -76,11 +76,11 @@ TEST(GaussianBayesNet, Evaluate1) { // the normalization constant 1.0/sqrt((2*pi*Sigma).det()). // The covariance matrix inv(Sigma) = R'*R, so the determinant is const double constant = sqrt((invSigma / (2 * M_PI)).determinant()); - EXPECT_DOUBLES_EQUAL(log(constant), + EXPECT_DOUBLES_EQUAL(-log(constant), smallBayesNet.at(0)->logNormalizationConstant() + smallBayesNet.at(1)->logNormalizationConstant(), 1e-9); - EXPECT_DOUBLES_EQUAL(log(constant), smallBayesNet.logNormalizationConstant(), + EXPECT_DOUBLES_EQUAL(-log(constant), smallBayesNet.logNormalizationConstant(), 1e-9); const double actual = smallBayesNet.evaluate(mean); EXPECT_DOUBLES_EQUAL(constant, actual, 1e-9); diff --git a/gtsam/linear/tests/testGaussianConditional.cpp b/gtsam/linear/tests/testGaussianConditional.cpp index dcd821889..b03e0a060 100644 --- a/gtsam/linear/tests/testGaussianConditional.cpp +++ b/gtsam/linear/tests/testGaussianConditional.cpp @@ -492,9 +492,10 @@ TEST(GaussianConditional, LogNormalizationConstant) { VectorValues x; x.insert(X(0), Vector3::Zero()); Matrix3 Sigma = I_3x3 * sigma * sigma; - double expectedLogNormalizingConstant = log(1 / sqrt((2 * M_PI * Sigma).determinant())); + double expectedLogNormalizationConstant = + -log(1 / sqrt((2 * M_PI * Sigma).determinant())); - EXPECT_DOUBLES_EQUAL(expectedLogNormalizingConstant, + EXPECT_DOUBLES_EQUAL(expectedLogNormalizationConstant, conditional.logNormalizationConstant(), 1e-9); } @@ -516,7 +517,7 @@ TEST(GaussianConditional, Print) { " d = [ 20 40 ]\n" " mean: 1 elements\n" " x0: 20 40\n" - " logNormalizationConstant: -4.0351\n" + " logNormalizationConstant: 4.0351\n" "isotropic dim=2 sigma=3\n"; EXPECT(assert_print_equal(expected, conditional, 
"GaussianConditional")); @@ -531,7 +532,7 @@ TEST(GaussianConditional, Print) { " S[x1] = [ -1 -2 ]\n" " [ -3 -4 ]\n" " d = [ 20 40 ]\n" - " logNormalizationConstant: -4.0351\n" + " logNormalizationConstant: 4.0351\n" "isotropic dim=2 sigma=3\n"; EXPECT(assert_print_equal(expected1, conditional1, "GaussianConditional")); @@ -547,7 +548,7 @@ TEST(GaussianConditional, Print) { " S[y1] = [ -5 -6 ]\n" " [ -7 -8 ]\n" " d = [ 20 40 ]\n" - " logNormalizationConstant: -4.0351\n" + " logNormalizationConstant: 4.0351\n" "isotropic dim=2 sigma=3\n"; EXPECT(assert_print_equal(expected2, conditional2, "GaussianConditional")); } diff --git a/gtsam/linear/tests/testGaussianDensity.cpp b/gtsam/linear/tests/testGaussianDensity.cpp index 97eb0de70..3226f40ab 100644 --- a/gtsam/linear/tests/testGaussianDensity.cpp +++ b/gtsam/linear/tests/testGaussianDensity.cpp @@ -55,7 +55,7 @@ TEST(GaussianDensity, FromMeanAndStddev) { double expected1 = 0.5 * e.dot(e); EXPECT_DOUBLES_EQUAL(expected1, density.error(values), 1e-9); - double expected2 = density.logNormalizationConstant()- 0.5 * e.dot(e); + double expected2 = -(density.logNormalizationConstant() + 0.5 * e.dot(e)); EXPECT_DOUBLES_EQUAL(expected2, density.logProbability(values), 1e-9); } diff --git a/gtsam/linear/tests/testNoiseModel.cpp b/gtsam/linear/tests/testNoiseModel.cpp index 6aea154ee..59ee05d07 100644 --- a/gtsam/linear/tests/testNoiseModel.cpp +++ b/gtsam/linear/tests/testNoiseModel.cpp @@ -810,9 +810,9 @@ TEST(NoiseModel, NonDiagonalGaussian) TEST(NoiseModel, LogNormalizationConstant1D) { // Very simple 1D noise model, which we can compute by hand. double sigma = 0.1; - // For expected values, we compute 1/log(sqrt(|2πΣ|)) by hand. - // = -0.5*(log(2π) + log(Σ)) (since it is 1D) - double expected_value = -0.5 * log(2 * M_PI * sigma * sigma); + // For expected values, we compute -log(1/sqrt(|2πΣ|)) by hand. 
+ // = 0.5*(log(2π) + log(Σ)) (since it is 1D) + double expected_value = 0.5 * log(2 * M_PI * sigma * sigma); // Gaussian { @@ -839,7 +839,7 @@ TEST(NoiseModel, LogNormalizationConstant1D) { auto noise_model = Unit::Create(1); double actual_value = noise_model->logNormalizationConstant(); double sigma = 1.0; - expected_value = -0.5 * log(2 * M_PI * sigma * sigma); + expected_value = 0.5 * log(2 * M_PI * sigma * sigma); EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); } } @@ -850,7 +850,7 @@ TEST(NoiseModel, LogNormalizationConstant3D) { size_t n = 3; // We compute the expected values just like in the LogNormalizationConstant1D // test, but we multiply by 3 due to the determinant. - double expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma); + double expected_value = 0.5 * n * log(2 * M_PI * sigma * sigma); // Gaussian { @@ -879,7 +879,7 @@ TEST(NoiseModel, LogNormalizationConstant3D) { auto noise_model = Unit::Create(3); double actual_value = noise_model->logNormalizationConstant(); double sigma = 1.0; - expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma); + expected_value = 0.5 * n * log(2 * M_PI * sigma * sigma); EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); } } From ceb9496e7c6456a16e7fa812bff944009cfbe88d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 21 Sep 2024 05:16:14 -0400 Subject: [PATCH 293/398] update hybrid code to use -log(k) consistently --- gtsam/hybrid/HybridGaussianConditional.cpp | 12 ++++---- gtsam/hybrid/HybridGaussianConditional.h | 2 +- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 11 +++---- .../tests/testHybridGaussianConditional.cpp | 15 +++++----- .../hybrid/tests/testHybridGaussianFactor.cpp | 4 +-- .../tests/testHybridNonlinearFactorGraph.cpp | 30 +++++++++---------- 6 files changed, 38 insertions(+), 36 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index fb943366c..4ee5241a3 100644 --- 
a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -36,7 +36,7 @@ HybridGaussianFactor::FactorValuePairs GetFactorValuePairs( // Check if conditional is pruned if (conditional) { // Assign log(\sqrt(|2πΣ|)) = -log(1 / sqrt(|2πΣ|)) - value = -conditional->logNormalizationConstant(); + value = conditional->logNormalizationConstant(); } return {std::dynamic_pointer_cast(conditional), value}; }; @@ -51,14 +51,14 @@ HybridGaussianConditional::HybridGaussianConditional( discreteParents, GetFactorValuePairs(conditionals)), BaseConditional(continuousFrontals.size()), conditionals_(conditionals) { - // Calculate logConstant_ as the minimum of the log normalizers of the - // conditionals, by visiting the decision tree: + // Calculate logConstant_ as the minimum of the negative-log normalizers of + // the conditionals, by visiting the decision tree: logConstant_ = std::numeric_limits::infinity(); conditionals_.visit( [this](const GaussianConditional::shared_ptr &conditional) { if (conditional) { this->logConstant_ = std::min( - this->logConstant_, -conditional->logNormalizationConstant()); + this->logConstant_, conditional->logNormalizationConstant()); } }); } @@ -85,7 +85,7 @@ GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree() // First check if conditional has not been pruned if (gc) { const double Cgm_Kgcm = - -this->logConstant_ - gc->logNormalizationConstant(); + gc->logNormalizationConstant() - this->logConstant_; // If there is a difference in the covariances, we need to account for // that since the error is dependent on the mode. 
if (Cgm_Kgcm > 0.0) { @@ -216,7 +216,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( -> GaussianFactorValuePair { const auto likelihood_m = conditional->likelihood(given); const double Cgm_Kgcm = - -logConstant_ - conditional->logNormalizationConstant(); + conditional->logNormalizationConstant() - logConstant_; if (Cgm_Kgcm == 0.0) { return {likelihood_m, 0.0}; } else { diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 4a5fdcc89..676c45f61 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -152,7 +152,7 @@ class GTSAM_EXPORT HybridGaussianConditional /// The log normalization constant is max of the the individual /// log-normalization constants. - double logNormalizationConstant() const override { return -logConstant_; } + double logNormalizationConstant() const override { return logConstant_; } /** * Create a likelihood factor for a hybrid Gaussian conditional, diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index a6fe955eb..ff9ca7a55 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -329,9 +329,9 @@ static std::shared_ptr createDiscreteFactor( // Logspace version of: // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); - // We take negative of the logNormalizationConstant `log(k)` - // to get `log(1/k) = log(\sqrt{|2πΣ|})`. - return -factor->error(kEmpty) - conditional->logNormalizationConstant(); + // logNormalizationConstant gives `-log(k)` + // which is `-log(k) = log(1/k) = log(\sqrt{|2πΣ|})`. 
+ return -factor->error(kEmpty) + conditional->logNormalizationConstant(); }; AlgebraicDecisionTree logProbabilities( @@ -355,8 +355,9 @@ static std::shared_ptr createHybridGaussianFactor( auto hf = std::dynamic_pointer_cast(factor); if (!hf) throw std::runtime_error("Expected HessianFactor!"); // Add 2.0 term since the constant term will be premultiplied by 0.5 - // as per the Hessian definition - hf->constantTerm() += 2.0 * conditional->logNormalizationConstant(); + // as per the Hessian definition, + // and negative since we want log(k) + hf->constantTerm() += -2.0 * conditional->logNormalizationConstant(); } return {factor, 0.0}; }; diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index dde493685..040cd2ff0 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -180,12 +180,13 @@ TEST(HybridGaussianConditional, Error2) { // Check result. DiscreteKeys discrete_keys{mode}; - double logNormalizer0 = -conditionals[0]->logNormalizationConstant(); - double logNormalizer1 = -conditionals[1]->logNormalizationConstant(); + double logNormalizer0 = conditionals[0]->logNormalizationConstant(); + double logNormalizer1 = conditionals[1]->logNormalizationConstant(); double minLogNormalizer = std::min(logNormalizer0, logNormalizer1); - // Expected error is e(X) + log(|2πΣ|). - // We normalize log(|2πΣ|) with min(logNormalizers) so it is non-negative. + // Expected error is e(X) + log(sqrt(|2πΣ|)). + // We normalize log(sqrt(|2πΣ|)) with min(logNormalizers) + // so it is non-negative. std::vector leaves = { conditionals[0]->error(vv) + logNormalizer0 - minLogNormalizer, conditionals[1]->error(vv) + logNormalizer1 - minLogNormalizer}; @@ -196,7 +197,7 @@ TEST(HybridGaussianConditional, Error2) { // Check for non-tree version. 
for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) - + EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) + conditionals[mode]->logNormalizationConstant() - minLogNormalizer, hybrid_conditional.error(hv), 1e-8); @@ -230,8 +231,8 @@ TEST(HybridGaussianConditional, Likelihood2) { CHECK(jf1->rows() == 2); // Check that the constant C1 is properly encoded in the JacobianFactor. - const double C1 = hybrid_conditional.logNormalizationConstant() - - conditionals[1]->logNormalizationConstant(); + const double C1 = conditionals[1]->logNormalizationConstant() - + hybrid_conditional.logNormalizationConstant(); const double c1 = std::sqrt(2.0 * C1); Vector expected_unwhitened(2); expected_unwhitened << 4.9 - 5.0, -c1; diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index bfa283983..cd5f9bf07 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -773,8 +773,8 @@ static HybridGaussianFactorGraph CreateFactorGraph( // We take negative since we want // the underlying scalar to be log(\sqrt(|2πΣ|)) std::vector factors{ - {f0, -model0->logNormalizationConstant()}, - {f1, -model1->logNormalizationConstant()}}; + {f0, model0->logNormalizationConstant()}, + {f1, model1->logNormalizationConstant()}}; HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors); HybridGaussianFactorGraph hfg; diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 347cc5f1f..cd9b24b37 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -714,26 +714,26 @@ factor 6: P( m1 | m0 ): size: 3 conditional 0: Hybrid P( x0 | x1 m0) Discrete Keys = (m0, 2), - logNormalizationConstant: 1.38862 + logNormalizationConstant: -1.38862 Choice(m0) 0 Leaf p(x0 | x1) R = [ 
10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.85087 ] - logNormalizationConstant: 1.38862 + logNormalizationConstant: -1.38862 No noise model 1 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.95037 ] - logNormalizationConstant: 1.38862 + logNormalizationConstant: -1.38862 No noise model conditional 1: Hybrid P( x1 | x2 m0 m1) Discrete Keys = (m0, 2), (m1, 2), - logNormalizationConstant: 1.3935 + logNormalizationConstant: -1.3935 Choice(m1) 0 Choice(m0) @@ -741,14 +741,14 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.99901 ] - logNormalizationConstant: 1.3935 + logNormalizationConstant: -1.3935 No noise model 0 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.90098 ] - logNormalizationConstant: 1.3935 + logNormalizationConstant: -1.3935 No noise model 1 Choice(m0) @@ -756,19 +756,19 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10.098 ] - logNormalizationConstant: 1.3935 + logNormalizationConstant: -1.3935 No noise model 1 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10 ] - logNormalizationConstant: 1.3935 + logNormalizationConstant: -1.3935 No noise model conditional 2: Hybrid P( x2 | m0 m1) Discrete Keys = (m0, 2), (m1, 2), - logNormalizationConstant: 1.38857 + logNormalizationConstant: -1.38857 Choice(m1) 0 Choice(m0) @@ -777,7 +777,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.1489 ] mean: 1 elements x2: -1.0099 - logNormalizationConstant: 1.38857 + logNormalizationConstant: -1.38857 No noise model 0 1 Leaf p(x2) @@ -785,7 +785,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.1479 ] mean: 1 elements x2: -1.0098 - logNormalizationConstant: 1.38857 + logNormalizationConstant: -1.38857 No noise model 1 Choice(m0) @@ -794,7 +794,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.0504 ] mean: 1 elements x2: -1.0001 - logNormalizationConstant: 1.38857 + logNormalizationConstant: -1.38857 No noise model 1 1 Leaf p(x2) @@ -802,7 
+802,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.0494 ] mean: 1 elements x2: -1 - logNormalizationConstant: 1.38857 + logNormalizationConstant: -1.38857 No noise model )"; @@ -903,8 +903,8 @@ static HybridNonlinearFactorGraph CreateFactorGraph( // We take negative since we want // the underlying scalar to be log(\sqrt(|2πΣ|)) std::vector factors{ - {f0, -model0->logNormalizationConstant()}, - {f1, -model1->logNormalizationConstant()}}; + {f0, model0->logNormalizationConstant()}, + {f1, model1->logNormalizationConstant()}}; HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors); From d3971b93c82433c7de8bfbc08f9e338733755dcc Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 21 Sep 2024 06:14:36 -0400 Subject: [PATCH 294/398] fix python tests --- python/gtsam/tests/test_HybridBayesNet.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/gtsam/tests/test_HybridBayesNet.py b/python/gtsam/tests/test_HybridBayesNet.py index 2f7dd4a98..bb12ff02b 100644 --- a/python/gtsam/tests/test_HybridBayesNet.py +++ b/python/gtsam/tests/test_HybridBayesNet.py @@ -90,8 +90,8 @@ class TestHybridBayesNet(GtsamTestCase): self.assertTrue(probability >= 0.0) logProb = conditional.logProbability(values) self.assertAlmostEqual(probability, np.exp(logProb)) - expected = conditional.logNormalizationConstant() - \ - conditional.error(values) + expected = -(conditional.logNormalizationConstant() + \ + conditional.error(values)) self.assertAlmostEqual(logProb, expected) From f84a4c71ae29b12c06acf7edb266ea060a967dd3 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 21 Sep 2024 15:14:35 -0700 Subject: [PATCH 295/398] New constructor and docs --- gtsam/hybrid/HybridGaussianFactor.cpp | 2 +- gtsam/hybrid/HybridGaussianFactor.h | 51 ++++++++++++++++++--------- 2 files changed, 35 insertions(+), 18 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 2fbd4bd88..88c557672 100644 --- 
a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -38,7 +38,7 @@ namespace gtsam { * Gaussian factor in factors. * @return HybridGaussianFactor::Factors */ -HybridGaussianFactor::Factors augment( +static HybridGaussianFactor::Factors augment( const HybridGaussianFactor::FactorValuePairs &factors) { // Find the minimum value so we can "proselytize" to positive values. // Done because we can't have sqrt of negative numbers. diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index a86714863..925a37e04 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -89,25 +89,28 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { HybridGaussianFactor() = default; /** - * @brief Construct a new hybrid Gaussian factor. - * - * @param continuousKeys A vector of keys representing continuous variables. - * @param discreteKeys A vector of keys representing discrete variables and - * their cardinalities. - * @param factors The decision tree of Gaussian factors and arbitrary scalars. - */ - HybridGaussianFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const FactorValuePairs &factors); - - /** - * @brief Construct a new HybridGaussianFactor object using a vector of - * GaussianFactor shared pointers. + * @brief Construct a new HybridGaussianFactor on a single discrete key, + * providing the factors for each mode m as a vector of factors ϕ_m(x). + * The value ϕ(x,m) for the factor is simply ϕ_m(x). * * @param continuousKeys Vector of keys for continuous factors. - * @param discreteKey The discrete key to index each component. - * @param factors Vector of gaussian factor shared pointers - * and arbitrary scalars. Same size as the cardinality of discreteKey. + * @param discreteKey The discrete key for the "mode", indexing components. + * @param factors Vector of gaussian factors, one for each mode. 
+ */ + HybridGaussianFactor(const KeyVector &continuousKeys, + const DiscreteKey &discreteKey, + const std::vector &factors) + : Base(continuousKeys, {discreteKey}), factors_({discreteKey}, factors) {} + + /** + * @brief Construct a new HybridGaussianFactor on a single discrete key, + * including a scalar error value for each mode m. The factors and scalars are + * provided as a vector of pairs (ϕ_m(x), E_m). + * The value ϕ(x,m) for the factor is now ϕ_m(x) + E_m. + * + * @param continuousKeys Vector of keys for continuous factors. + * @param discreteKey The discrete key for the "mode", indexing components. + * @param factors Vector of gaussian factor-scalar pairs, one per mode. */ HybridGaussianFactor(const KeyVector &continuousKeys, const DiscreteKey &discreteKey, @@ -115,6 +118,20 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { : HybridGaussianFactor(continuousKeys, {discreteKey}, FactorValuePairs({discreteKey}, factors)) {} + /** + * @brief Construct a new HybridGaussianFactor on a several discrete keys M, + * including a scalar error value for each assignment m. The factors and + * scalars are provided as a DecisionTree of pairs (ϕ_M(x), E_M). + * The value ϕ(x,M) for the factor is again ϕ_m(x) + E_m. + * + * @param continuousKeys A vector of keys representing continuous variables. + * @param discreteKeys Discrete variables and their cardinalities. + * @param factors The decision tree of Gaussian factor/scalar pairs. 
+ */ + HybridGaussianFactor(const KeyVector &continuousKeys, + const DiscreteKeys &discreteKeys, + const FactorValuePairs &factors); + /// @} /// @name Testable /// @{ From 035f2849d0a644e122a8c8665c43697e49e034db Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 21 Sep 2024 15:52:45 -0700 Subject: [PATCH 296/398] Use simple constructor --- gtsam/hybrid/tests/Switching.h | 34 +++++----- gtsam/hybrid/tests/testHybridFactorGraph.cpp | 14 ++-- .../hybrid/tests/testHybridGaussianFactor.cpp | 60 +++++++++-------- .../tests/testHybridGaussianFactorGraph.cpp | 65 ++++++------------- 4 files changed, 78 insertions(+), 95 deletions(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index f18c19559..2de8e7fc0 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -31,6 +31,9 @@ #include +#include "gtsam/linear/GaussianFactor.h" +#include "gtsam/linear/GaussianFactorGraph.h" + #pragma once namespace gtsam { @@ -44,33 +47,28 @@ using symbol_shorthand::X; * system which depends on a discrete mode at each time step of the chain. * * @param n The number of chain elements. - * @param keyFunc The functional to help specify the continuous key. - * @param dKeyFunc The functional to help specify the discrete key. + * @param x The functional to help specify the continuous key. + * @param m The functional to help specify the discrete key. 
* @return HybridGaussianFactorGraph::shared_ptr */ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( - size_t n, std::function keyFunc = X, - std::function dKeyFunc = M) { + size_t n, std::function x = X, std::function m = M) { HybridGaussianFactorGraph hfg; - hfg.add(JacobianFactor(keyFunc(1), I_3x3, Z_3x1)); + hfg.add(JacobianFactor(x(1), I_3x3, Z_3x1)); - // keyFunc(1) to keyFunc(n+1) + // x(1) to x(n+1) for (size_t t = 1; t < n; t++) { - DiscreteKeys dKeys{{dKeyFunc(t), 2}}; - HybridGaussianFactor::FactorValuePairs components( - dKeys, {{std::make_shared(keyFunc(t), I_3x3, - keyFunc(t + 1), I_3x3, Z_3x1), - 0.0}, - {std::make_shared( - keyFunc(t), I_3x3, keyFunc(t + 1), I_3x3, Vector3::Ones()), - 0.0}}); - hfg.add( - HybridGaussianFactor({keyFunc(t), keyFunc(t + 1)}, dKeys, components)); + DiscreteKeys dKeys{{m(t), 2}}; + std::vector components; + components.emplace_back( + new JacobianFactor(x(t), I_3x3, x(t + 1), I_3x3, Z_3x1)); + components.emplace_back( + new JacobianFactor(x(t), I_3x3, x(t + 1), I_3x3, Vector3::Ones())); + hfg.add(HybridGaussianFactor({x(t), x(t + 1)}, {m(t), 2}, components)); if (t > 1) { - hfg.add(DecisionTreeFactor({{dKeyFunc(t - 1), 2}, {dKeyFunc(t), 2}}, - "0 1 1 3")); + hfg.add(DecisionTreeFactor({{m(t - 1), 2}, {m(t), 2}}, "0 1 1 3")); } } diff --git a/gtsam/hybrid/tests/testHybridFactorGraph.cpp b/gtsam/hybrid/tests/testHybridFactorGraph.cpp index 20719b739..d76eaf08d 100644 --- a/gtsam/hybrid/tests/testHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridFactorGraph.cpp @@ -52,10 +52,10 @@ TEST(HybridFactorGraph, Keys) { // Add a hybrid Gaussian factor ϕ(x1, c1) DiscreteKey m1(M(1), 2); - DecisionTree dt( - M(1), {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, - {std::make_shared(X(1), I_3x3, Vector3::Ones()), 0.0}); - hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt)); + std::vector components{ + std::make_shared(X(1), I_3x3, Z_3x1), + std::make_shared(X(1), I_3x3, Vector3::Ones())}; + 
hfg.add(HybridGaussianFactor({X(1)}, {m1}, components)); KeySet expected_continuous{X(0), X(1)}; EXPECT( @@ -65,9 +65,11 @@ TEST(HybridFactorGraph, Keys) { EXPECT(assert_container_equality(expected_discrete, hfg.discreteKeySet())); } -/* ************************************************************************* */ +/* ************************************************************************* + */ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } -/* ************************************************************************* */ +/* ************************************************************************* + */ diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index bfa283983..34341c999 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -54,32 +54,49 @@ TEST(HybridGaussianFactor, Constructor) { CHECK(it == factor.end()); } +/* ************************************************************************* */ +namespace testA { +DiscreteKey m1(1, 2); + +auto A1 = Matrix::Zero(2, 1); +auto A2 = Matrix::Zero(2, 2); +auto b = Matrix::Zero(2, 1); + +auto f10 = std::make_shared(X(1), A1, X(2), A2, b); +auto f11 = std::make_shared(X(1), A1, X(2), A2, b); +} // namespace testA + +/* ************************************************************************* */ +// Test simple to complex constructors... 
+TEST(HybridGaussianFactor, ConstructorVariants) { + using namespace testA; + HybridGaussianFactor fromFactors({X(1), X(2)}, m1, {f10, f11}); + + std::vector pairs{{f10, 0.0}, {f11, 0.0}}; + HybridGaussianFactor fromPairs({X(1), X(2)}, {m1}, pairs); + assert_equal(fromFactors, fromPairs); + + HybridGaussianFactor::FactorValuePairs decisionTree({m1}, pairs); + HybridGaussianFactor fromDecisionTree({X(1), X(2)}, {m1}, decisionTree); + assert_equal(fromDecisionTree, fromPairs); +} + /* ************************************************************************* */ // "Add" two hybrid factors together. TEST(HybridGaussianFactor, Sum) { - DiscreteKey m1(1, 2), m2(2, 3); + using namespace testA; + DiscreteKey m2(2, 3); - auto A1 = Matrix::Zero(2, 1); - auto A2 = Matrix::Zero(2, 2); auto A3 = Matrix::Zero(2, 3); - auto b = Matrix::Zero(2, 1); - Vector2 sigmas; - sigmas << 1, 2; - - auto f10 = std::make_shared(X(1), A1, X(2), A2, b); - auto f11 = std::make_shared(X(1), A1, X(2), A2, b); auto f20 = std::make_shared(X(1), A1, X(3), A3, b); auto f21 = std::make_shared(X(1), A1, X(3), A3, b); auto f22 = std::make_shared(X(1), A1, X(3), A3, b); - std::vector factorsA{{f10, 0.0}, {f11, 0.0}}; - std::vector factorsB{ - {f20, 0.0}, {f21, 0.0}, {f22, 0.0}}; // TODO(Frank): why specify keys at all? And: keys in factor should be *all* // keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey? // Design review! 
- HybridGaussianFactor hybridFactorA({X(1), X(2)}, {m1}, factorsA); - HybridGaussianFactor hybridFactorB({X(1), X(3)}, {m2}, factorsB); + HybridGaussianFactor hybridFactorA({X(1), X(2)}, m1, {f10, f11}); + HybridGaussianFactor hybridFactorB({X(1), X(3)}, m2, {f20, f21, f22}); // Check that number of keys is 3 EXPECT_LONGS_EQUAL(3, hybridFactorA.keys().size()); @@ -104,15 +121,8 @@ TEST(HybridGaussianFactor, Sum) { /* ************************************************************************* */ TEST(HybridGaussianFactor, Printing) { - DiscreteKey m1(1, 2); - auto A1 = Matrix::Zero(2, 1); - auto A2 = Matrix::Zero(2, 2); - auto b = Matrix::Zero(2, 1); - auto f10 = std::make_shared(X(1), A1, X(2), A2, b); - auto f11 = std::make_shared(X(1), A1, X(2), A2, b); - std::vector factors{{f10, 0.0}, {f11, 0.0}}; - - HybridGaussianFactor hybridFactor({X(1), X(2)}, {m1}, factors); + using namespace testA; + HybridGaussianFactor hybridFactor({X(1), X(2)}, m1, {f10, f11}); std::string expected = R"(HybridGaussianFactor @@ -179,9 +189,7 @@ TEST(HybridGaussianFactor, Error) { auto f0 = std::make_shared(X(1), A01, X(2), A02, b); auto f1 = std::make_shared(X(1), A11, X(2), A12, b); - std::vector factors{{f0, 0.0}, {f1, 0.0}}; - - HybridGaussianFactor hybridFactor({X(1), X(2)}, {m1}, factors); + HybridGaussianFactor hybridFactor({X(1), X(2)}, m1, {f0, f1}); VectorValues continuousValues; continuousValues.insert(X(1), Vector2(0, 0)); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 1530069aa..36e2887c8 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -114,6 +114,14 @@ TEST(HybridGaussianFactorGraph, EliminateMultifrontal) { EXPECT_LONGS_EQUAL(result.first->size(), 1); EXPECT_LONGS_EQUAL(result.second->size(), 1); } +/* ************************************************************************* */ + +namespace two { +std::vector 
components(Key key) { + return {std::make_shared(key, I_3x3, Z_3x1), + std::make_shared(key, I_3x3, Vector3::Ones())}; +} +} // namespace two /* ************************************************************************* */ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { @@ -127,10 +135,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { // Add a hybrid gaussian factor ϕ(x1, c1) DiscreteKey m1(M(1), 2); - DecisionTree dt( - M(1), {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, - {std::make_shared(X(1), I_3x3, Vector3::Ones()), 0.0}); - hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt)); + hfg.add(HybridGaussianFactor({X(1)}, m1, two::components(X(1)))); auto result = hfg.eliminateSequential(); @@ -153,10 +158,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) { // Add factor between x0 and x1 hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - std::vector factors = { - {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, - {std::make_shared(X(1), I_3x3, Vector3::Ones()), 0.0}}; - hfg.add(HybridGaussianFactor({X(1)}, {m1}, factors)); + hfg.add(HybridGaussianFactor({X(1)}, m1, two::components(X(1)))); // Discrete probability table for c1 hfg.add(DecisionTreeFactor(m1, {2, 8})); @@ -178,10 +180,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - std::vector factors = { - {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, - {std::make_shared(X(1), I_3x3, Vector3::Ones()), 0.0}}; - hfg.add(HybridGaussianFactor({X(1)}, {M(1), 2}, factors)); + hfg.add(HybridGaussianFactor({X(1)}, {M(1), 2}, two::components(X(1)))); hfg.add(DecisionTreeFactor(m1, {2, 8})); // TODO(Varun) Adding extra discrete variable not connected to continuous @@ -207,13 +206,8 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) { // Factor between x0-x1 hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - // 
Decision tree with different modes on x1 - DecisionTree dt( - M(1), {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, - {std::make_shared(X(1), I_3x3, Vector3::Ones()), 0.0}); - // Hybrid factor P(x1|c1) - hfg.add(HybridGaussianFactor({X(1)}, {m}, dt)); + hfg.add(HybridGaussianFactor({X(1)}, m, two::components(X(1)))); // Prior factor on c1 hfg.add(DecisionTreeFactor(m, {2, 8})); @@ -241,13 +235,8 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { std::vector factors = { {std::make_shared(X(0), I_3x3, Z_3x1), 0.0}, {std::make_shared(X(0), I_3x3, Vector3::Ones()), 0.0}}; - hfg.add(HybridGaussianFactor({X(0)}, {M(0), 2}, factors)); - - DecisionTree dt1( - M(1), {std::make_shared(X(2), I_3x3, Z_3x1), 0.0}, - {std::make_shared(X(2), I_3x3, Vector3::Ones()), 0.0}); - - hfg.add(HybridGaussianFactor({X(2)}, {{M(1), 2}}, dt1)); + hfg.add(HybridGaussianFactor({X(0)}, {M(0), 2}, two::components(X(0)))); + hfg.add(HybridGaussianFactor({X(2)}, {M(1), 2}, two::components(X(2)))); } hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); @@ -256,17 +245,8 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1)); { - DecisionTree dt( - M(3), {std::make_shared(X(3), I_3x3, Z_3x1), 0.0}, - {std::make_shared(X(3), I_3x3, Vector3::Ones()), 0.0}); - - hfg.add(HybridGaussianFactor({X(3)}, {{M(3), 2}}, dt)); - - DecisionTree dt1( - M(2), {std::make_shared(X(5), I_3x3, Z_3x1), 0.0}, - {std::make_shared(X(5), I_3x3, Vector3::Ones()), 0.0}); - - hfg.add(HybridGaussianFactor({X(5)}, {{M(2), 2}}, dt1)); + hfg.add(HybridGaussianFactor({X(3)}, {M(3), 2}, two::components(X(3)))); + hfg.add(HybridGaussianFactor({X(5)}, {M(2), 2}, two::components(X(5)))); } auto ordering_full = @@ -551,12 +531,7 @@ TEST(HybridGaussianFactorGraph, optimize) { hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - - DecisionTree dt( - C(1), {std::make_shared(X(1), 
I_3x3, Z_3x1), 0.0}, - {std::make_shared(X(1), I_3x3, Vector3::Ones()), 0.0}); - - hfg.add(HybridGaussianFactor({X(1)}, {c1}, dt)); + hfg.add(HybridGaussianFactor({X(1)}, c1, two::components(X(1)))); auto result = hfg.eliminateSequential(); @@ -642,13 +617,13 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) { // regression EXPECT(assert_equal(expected_error, error_tree, 1e-7)); - auto probs = graph.probPrime(delta.continuous()); + auto probabilities = graph.probPrime(delta.continuous()); std::vector prob_leaves = {0.36793249, 0.61247742, 0.59489556, 0.99029064}; - AlgebraicDecisionTree expected_probs(discrete_keys, prob_leaves); + AlgebraicDecisionTree expected_probabilities(discrete_keys, prob_leaves); // regression - EXPECT(assert_equal(expected_probs, probs, 1e-7)); + EXPECT(assert_equal(expected_probabilities, probabilities, 1e-7)); } /* ****************************************************************************/ From 5efac183417c5627fa2896cd0bed45aecb90749e Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 21 Sep 2024 16:01:19 -0700 Subject: [PATCH 297/398] Fixed some stragglers --- gtsam/hybrid/tests/testHybridFactorGraph.cpp | 8 +++----- gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp | 5 +---- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridFactorGraph.cpp b/gtsam/hybrid/tests/testHybridFactorGraph.cpp index d76eaf08d..e4d207af3 100644 --- a/gtsam/hybrid/tests/testHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridFactorGraph.cpp @@ -55,7 +55,7 @@ TEST(HybridFactorGraph, Keys) { std::vector components{ std::make_shared(X(1), I_3x3, Z_3x1), std::make_shared(X(1), I_3x3, Vector3::Ones())}; - hfg.add(HybridGaussianFactor({X(1)}, {m1}, components)); + hfg.add(HybridGaussianFactor({X(1)}, m1, components)); KeySet expected_continuous{X(0), X(1)}; EXPECT( @@ -65,11 +65,9 @@ TEST(HybridFactorGraph, Keys) { EXPECT(assert_container_equality(expected_discrete, hfg.discreteKeySet())); } -/* 
************************************************************************* - */ +/* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } -/* ************************************************************************* - */ +/* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 36e2887c8..66a54f73f 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -232,9 +232,6 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1)); { - std::vector factors = { - {std::make_shared(X(0), I_3x3, Z_3x1), 0.0}, - {std::make_shared(X(0), I_3x3, Vector3::Ones()), 0.0}}; hfg.add(HybridGaussianFactor({X(0)}, {M(0), 2}, two::components(X(0)))); hfg.add(HybridGaussianFactor({X(2)}, {M(1), 2}, two::components(X(2)))); } @@ -259,7 +256,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { EXPECT_LONGS_EQUAL(0, remaining->size()); /* - (Fan) Explanation: the Junction tree will need to reeliminate to get to the + (Fan) Explanation: the Junction tree will need to re-eliminate to get to the marginal on X(1), which is not possible because it involves eliminating discrete before continuous. The solution to this, however, is in Murphy02. TLDR is that this is 1. expensive and 2. inexact. nevertheless it is doable. 
From 9f9032f9040934dcf2df699d64a1c9c3d705f5f6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 22 Sep 2024 11:06:56 -0400 Subject: [PATCH 298/398] add test for errorTree in incremental scenario --- .../tests/testHybridGaussianFactorGraph.cpp | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 1530069aa..f19fd29b1 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -706,6 +706,55 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) { EXPECT(assert_equal(expected, errorTree, 1e-9)); } +/* ****************************************************************************/ +// Test hybrid gaussian factor graph errorTree during +// incremental operation +TEST(HybridGaussianFactorGraph, IncrementalErrorTree) { + Switching s(4); + + HybridGaussianFactorGraph graph; + graph.push_back(s.linearizedFactorGraph.at(0)); // f(X0) + graph.push_back(s.linearizedFactorGraph.at(1)); // f(X0, X1, M0) + graph.push_back(s.linearizedFactorGraph.at(2)); // f(X1, X2, M1) + graph.push_back(s.linearizedFactorGraph.at(4)); // f(X1) + graph.push_back(s.linearizedFactorGraph.at(5)); // f(X2) + graph.push_back(s.linearizedFactorGraph.at(7)); // f(M0) + graph.push_back(s.linearizedFactorGraph.at(8)); // f(M0, M1) + + HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential(); + EXPECT_LONGS_EQUAL(5, hybridBayesNet->size()); + + HybridValues delta = hybridBayesNet->optimize(); + auto error_tree = graph.errorTree(delta.continuous()); + + std::vector discrete_keys = {{M(0), 2}, {M(1), 2}}; + std::vector leaves = {0.99985581, 0.4902432, 0.51936941, + 0.0097568009}; + AlgebraicDecisionTree expected_error(discrete_keys, leaves); + + // regression + EXPECT(assert_equal(expected_error, error_tree, 1e-7)); + + graph = HybridGaussianFactorGraph(); + 
graph.push_back(*hybridBayesNet); + graph.push_back(s.linearizedFactorGraph.at(3)); // f(X2, X3, M2) + graph.push_back(s.linearizedFactorGraph.at(6)); // f(X3) + + hybridBayesNet = graph.eliminateSequential(); + EXPECT_LONGS_EQUAL(7, hybridBayesNet->size()); + + delta = hybridBayesNet->optimize(); + auto error_tree2 = graph.errorTree(delta.continuous()); + + discrete_keys = {{M(0), 2}, {M(1), 2}, {M(2), 2}}; + leaves = {0.50985198, 0.0097577296, 0.50009425, 0, + 0.52922138, 0.029127133, 0.50985105, 0.0097567964}; + AlgebraicDecisionTree expected_error2(discrete_keys, leaves); + + // regression + EXPECT(assert_equal(expected_error, error_tree, 1e-7)); +} + /* ****************************************************************************/ // Check that assembleGraphTree assembles Gaussian factor graphs for each // assignment. From 39336a0b5bd36144155a5fb18d73e98cb915da33 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 22 Sep 2024 13:23:13 -0700 Subject: [PATCH 299/398] Address comments --- gtsam/hybrid/tests/testHybridGaussianFactor.cpp | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 34341c999..cd12d4f10 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -55,7 +55,7 @@ TEST(HybridGaussianFactor, Constructor) { } /* ************************************************************************* */ -namespace testA { +namespace test_constructor { DiscreteKey m1(1, 2); auto A1 = Matrix::Zero(2, 1); @@ -64,12 +64,12 @@ auto b = Matrix::Zero(2, 1); auto f10 = std::make_shared(X(1), A1, X(2), A2, b); auto f11 = std::make_shared(X(1), A1, X(2), A2, b); -} // namespace testA +} // namespace test_constructor /* ************************************************************************* */ // Test simple to complex constructors... 
TEST(HybridGaussianFactor, ConstructorVariants) { - using namespace testA; + using namespace test_constructor; HybridGaussianFactor fromFactors({X(1), X(2)}, m1, {f10, f11}); std::vector pairs{{f10, 0.0}, {f11, 0.0}}; @@ -84,7 +84,7 @@ TEST(HybridGaussianFactor, ConstructorVariants) { /* ************************************************************************* */ // "Add" two hybrid factors together. TEST(HybridGaussianFactor, Sum) { - using namespace testA; + using namespace test_constructor; DiscreteKey m2(2, 3); auto A3 = Matrix::Zero(2, 3); @@ -121,7 +121,7 @@ TEST(HybridGaussianFactor, Sum) { /* ************************************************************************* */ TEST(HybridGaussianFactor, Printing) { - using namespace testA; + using namespace test_constructor; HybridGaussianFactor hybridFactor({X(1), X(2)}, m1, {f10, f11}); std::string expected = From 2d2213e8806afad763c2ee620531b6a9c25950f3 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 22 Sep 2024 22:11:23 -0400 Subject: [PATCH 300/398] add errorConstant method and use it for logNormalizationConstant in Conditional --- gtsam/discrete/DiscreteConditional.cpp | 6 ++++++ gtsam/discrete/DiscreteConditional.h | 5 +++-- gtsam/hybrid/HybridConditional.cpp | 10 +++++----- gtsam/hybrid/HybridConditional.h | 5 +++-- gtsam/hybrid/HybridGaussianConditional.cpp | 12 +++++------- gtsam/hybrid/HybridGaussianConditional.h | 12 +++++++++--- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 6 +++--- gtsam/inference/Conditional-inst.h | 13 ++++++++++--- gtsam/inference/Conditional.h | 13 ++++++++++--- gtsam/linear/GaussianBayesNet.cpp | 15 ++++++++------- gtsam/linear/GaussianConditional.cpp | 8 ++++---- gtsam/linear/GaussianConditional.h | 6 ++++-- gtsam/linear/NoiseModel.cpp | 6 +++++- gtsam/linear/NoiseModel.h | 10 ++++++++-- gtsam/linear/linear.i | 1 + 15 files changed, 84 insertions(+), 44 deletions(-) diff --git a/gtsam/discrete/DiscreteConditional.cpp b/gtsam/discrete/DiscreteConditional.cpp index 
9cf19638c..3e74cebb4 100644 --- a/gtsam/discrete/DiscreteConditional.cpp +++ b/gtsam/discrete/DiscreteConditional.cpp @@ -465,6 +465,12 @@ string DiscreteConditional::html(const KeyFormatter& keyFormatter, double DiscreteConditional::evaluate(const HybridValues& x) const { return this->evaluate(x.discrete()); } + +/* ************************************************************************* */ +double DiscreteConditional::errorConstant() const { + return 0.0; +} + /* ************************************************************************* */ } // namespace gtsam diff --git a/gtsam/discrete/DiscreteConditional.h b/gtsam/discrete/DiscreteConditional.h index 4c7af1489..53331a0be 100644 --- a/gtsam/discrete/DiscreteConditional.h +++ b/gtsam/discrete/DiscreteConditional.h @@ -264,11 +264,12 @@ class GTSAM_EXPORT DiscreteConditional } /** - * logNormalizationConstant K is just zero, such that + * errorConstant is just zero, such that * logProbability(x) = log(evaluate(x)) = - error(x) * and hence error(x) = - log(evaluate(x)) > 0 for all x. + * Thus -log(K) for the normalization constant k is 0. */ - double logNormalizationConstant() const override { return 0.0; } + double errorConstant() const override; /// @} diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index 3cb3bba65..57d8bba39 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -161,18 +161,18 @@ double HybridConditional::logProbability(const HybridValues &values) const { } /* ************************************************************************ */ -double HybridConditional::logNormalizationConstant() const { +double HybridConditional::errorConstant() const { if (auto gc = asGaussian()) { - return gc->logNormalizationConstant(); + return gc->errorConstant(); } if (auto gm = asHybrid()) { - return gm->logNormalizationConstant(); // 0.0! + return gm->errorConstant(); // 0.0! 
} if (auto dc = asDiscrete()) { - return dc->logNormalizationConstant(); // 0.0! + return dc->errorConstant(); // 0.0! } throw std::runtime_error( - "HybridConditional::logProbability: conditional type not handled"); + "HybridConditional::errorConstant: conditional type not handled"); } /* ************************************************************************ */ diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index 51eeeb5bb..92118385c 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -193,11 +193,12 @@ class GTSAM_EXPORT HybridConditional double logProbability(const HybridValues& values) const override; /** - * Return the log normalization constant. + * Return the negative log of the normalization constant. + * This shows up in the error as -(error(x) + errorConstant) * Note this is 0.0 for discrete and hybrid conditionals, but depends * on the continuous parameters for Gaussian conditionals. */ - double logNormalizationConstant() const override; + double errorConstant() const override; /// Return the probability (or density) of the underlying conditional. 
double evaluate(const HybridValues& values) const override; diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 4ee5241a3..f6cd3e5f7 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -36,7 +36,7 @@ HybridGaussianFactor::FactorValuePairs GetFactorValuePairs( // Check if conditional is pruned if (conditional) { // Assign log(\sqrt(|2πΣ|)) = -log(1 / sqrt(|2πΣ|)) - value = conditional->logNormalizationConstant(); + value = conditional->errorConstant(); } return {std::dynamic_pointer_cast(conditional), value}; }; @@ -57,8 +57,8 @@ HybridGaussianConditional::HybridGaussianConditional( conditionals_.visit( [this](const GaussianConditional::shared_ptr &conditional) { if (conditional) { - this->logConstant_ = std::min( - this->logConstant_, conditional->logNormalizationConstant()); + this->logConstant_ = + std::min(this->logConstant_, conditional->errorConstant()); } }); } @@ -84,8 +84,7 @@ GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree() auto wrap = [this](const GaussianConditional::shared_ptr &gc) { // First check if conditional has not been pruned if (gc) { - const double Cgm_Kgcm = - gc->logNormalizationConstant() - this->logConstant_; + const double Cgm_Kgcm = gc->errorConstant() - this->logConstant_; // If there is a difference in the covariances, we need to account for // that since the error is dependent on the mode. 
if (Cgm_Kgcm > 0.0) { @@ -215,8 +214,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( [&](const GaussianConditional::shared_ptr &conditional) -> GaussianFactorValuePair { const auto likelihood_m = conditional->likelihood(given); - const double Cgm_Kgcm = - conditional->logNormalizationConstant() - logConstant_; + const double Cgm_Kgcm = conditional->errorConstant() - logConstant_; if (Cgm_Kgcm == 0.0) { return {likelihood_m, 0.0}; } else { diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 676c45f61..8851c7741 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -150,9 +150,15 @@ class GTSAM_EXPORT HybridGaussianConditional /// Returns the continuous keys among the parents. KeyVector continuousParents() const; - /// The log normalization constant is max of the the individual - /// log-normalization constants. - double logNormalizationConstant() const override { return logConstant_; } + /** + * @brief Return log normalization constant in negative log space. + * + * The log normalization constant is the max of the individual + * log-normalization constants. + * + * @return double + */ + inline double errorConstant() const override { return logConstant_; } /** * Create a likelihood factor for a hybrid Gaussian conditional, diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index ff9ca7a55..a6c15d1fd 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -329,9 +329,9 @@ static std::shared_ptr createDiscreteFactor( // Logspace version of: // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); - // logNormalizationConstant gives `-log(k)` + // errorConstant gives `-log(k)` // which is `-log(k) = log(1/k) = log(\sqrt{|2πΣ|})`. 
- return -factor->error(kEmpty) + conditional->logNormalizationConstant(); + return -factor->error(kEmpty) + conditional->errorConstant(); }; AlgebraicDecisionTree logProbabilities( @@ -357,7 +357,7 @@ static std::shared_ptr createHybridGaussianFactor( // Add 2.0 term since the constant term will be premultiplied by 0.5 // as per the Hessian definition, // and negative since we want log(k) - hf->constantTerm() += -2.0 * conditional->logNormalizationConstant(); + hf->constantTerm() += -2.0 * conditional->errorConstant(); } return {factor, 0.0}; }; diff --git a/gtsam/inference/Conditional-inst.h b/gtsam/inference/Conditional-inst.h index b61a5ac22..ba42a9a24 100644 --- a/gtsam/inference/Conditional-inst.h +++ b/gtsam/inference/Conditional-inst.h @@ -59,10 +59,17 @@ double Conditional::evaluate( /* ************************************************************************* */ template -double Conditional::logNormalizationConstant() +double Conditional::errorConstant() const { throw std::runtime_error( - "Conditional::logNormalizationConstant is not implemented"); + "Conditional::errorConstant is not implemented"); +} + +/* ************************************************************************* */ +template +double Conditional::logNormalizationConstant() + const { + return -errorConstant(); } /* ************************************************************************* */ @@ -89,7 +96,7 @@ bool Conditional::CheckInvariants( // normalization constant const double error = conditional.error(values); if (error < 0.0) return false; // prob_or_density is negative. 
- const double expected = -(conditional.logNormalizationConstant() + error); + const double expected = -(conditional.errorConstant() + error); if (std::abs(logProb - expected) > 1e-9) return false; // logProb is not consistent with error return true; diff --git a/gtsam/inference/Conditional.h b/gtsam/inference/Conditional.h index 936a078a8..d09341489 100644 --- a/gtsam/inference/Conditional.h +++ b/gtsam/inference/Conditional.h @@ -164,9 +164,16 @@ namespace gtsam { } /** - * All conditional types need to implement a - * (negative) log normalization constant - * to make it such that error>=0. + * @brief All conditional types need to implement this as the negative log + * of the normalization constant. + * + * @return double + */ + virtual double errorConstant() const; + + /** + * All conditional types need to implement a log normalization constant to + * make it such that error>=0. */ virtual double logNormalizationConstant() const; diff --git a/gtsam/linear/GaussianBayesNet.cpp b/gtsam/linear/GaussianBayesNet.cpp index b7f296ea5..978475bcb 100644 --- a/gtsam/linear/GaussianBayesNet.cpp +++ b/gtsam/linear/GaussianBayesNet.cpp @@ -247,14 +247,15 @@ namespace gtsam { /* normalization constant = 1.0 / sqrt((2*pi)^n*det(Sigma)) logConstant = -log(normalizationConstant) - = 0.5 * n*log(2*pi) + 0.5 * log(det(Sigma)) - - log(det(Sigma)) = -2.0 * logDeterminant() - thus, logConstant = 0.5*n*log(2*pi) - logDeterminant() + = -0.5 * n*log(2*pi) - 0.5 * log(det(Sigma)) - BayesNet logConstant = sum(0.5*n_i*log(2*pi) - logDeterminant_i()) - = sum(0.5*n_i*log(2*pi)) - sum(logDeterminant_i()) - = sum(0.5*n_i*log(2*pi)) - bn->logDeterminant() + log(det(Sigma)) = -2.0 * logDeterminant() + thus, logConstant = -0.5*n*log(2*pi) + logDeterminant() + + BayesNet logConstant = sum(-0.5*n_i*log(2*pi) + logDeterminant_i()) + = sum(-0.5*n_i*log(2*pi)) + sum(logDeterminant_i()) + = sum(-0.5*n_i*log(2*pi)) + bn->logDeterminant() + = sum(logNormalizationConstant_i) */ double logNormConst = 0.0; 
for (const sharedConditional& cg : *this) { diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index fc633d04d..ca4a1ed33 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -182,7 +182,7 @@ namespace gtsam { /* ************************************************************************* */ // normalization constant = 1.0 / sqrt((2*pi)^n*det(Sigma)) // neg-log = 0.5 * n*log(2*pi) + 0.5 * log det(Sigma) - double GaussianConditional::logNormalizationConstant() const { + double GaussianConditional::errorConstant() const { constexpr double log2pi = 1.8378770664093454835606594728112; size_t n = d().size(); // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1} @@ -195,10 +195,10 @@ namespace gtsam { } /* ************************************************************************* */ - // density = 1/k exp(-error(x)) - // log = -log(k) - error(x) + // density = k exp(-error(x)) + // log = log(k) - error(x) double GaussianConditional::logProbability(const VectorValues& x) const { - return -logNormalizationConstant() - error(x); + return logNormalizationConstant() - error(x); } double GaussianConditional::logProbability(const HybridValues& x) const { diff --git a/gtsam/linear/GaussianConditional.h b/gtsam/linear/GaussianConditional.h index 757687ed8..31f3797d5 100644 --- a/gtsam/linear/GaussianConditional.h +++ b/gtsam/linear/GaussianConditional.h @@ -133,12 +133,14 @@ namespace gtsam { /// @{ /** - * Return normalization constant in negative log space. + * @brief Return the negative log of the normalization constant. 
* * normalization constant k = 1.0 / sqrt((2*pi)^n*det(Sigma)) * -log(k) = 0.5 * n*log(2*pi) + 0.5 * log det(Sigma) + * + * @return double */ - double logNormalizationConstant() const override; + double errorConstant() const override; /** * Calculate log-probability log(evaluate(x)) for given values `x`: diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index ea9ec1aec..39901584b 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -255,7 +255,7 @@ double Gaussian::logDeterminant() const { } /* *******************************************************************************/ -double Gaussian::logNormalizationConstant() const { +double Gaussian::errorConstant() const { // log(det(Sigma)) = -2.0 * logDetR // which gives neg-log = 0.5*n*log(2*pi) + 0.5*(-2.0 * logDetR()) // = 0.5*n*log(2*pi) - (0.5*2.0 * logDetR()) @@ -266,6 +266,10 @@ double Gaussian::logNormalizationConstant() const { return 0.5 * n * log2pi - logDetR(); } +/* *******************************************************************************/ +double Gaussian::logNormalizationConstant() const { + return -errorConstant(); +} /* ************************************************************************* */ // Diagonal diff --git a/gtsam/linear/NoiseModel.h b/gtsam/linear/NoiseModel.h index 31ffd1107..851dc3c1f 100644 --- a/gtsam/linear/NoiseModel.h +++ b/gtsam/linear/NoiseModel.h @@ -271,11 +271,17 @@ namespace gtsam { /// Compute the log of |Σ| double logDeterminant() const; + /** + * @brief Compute the negative log of the normalization constant + * for a Gaussian noise model k = \sqrt(1/|2πΣ|). + * + * @return double + */ + double errorConstant() const; + /** * @brief Method to compute the normalization constant * for a Gaussian noise model k = \sqrt(1/|2πΣ|). - * We compute this in the negative log-space for numerical accuracy, - * thus returning -log(k). 
* * @return double */ diff --git a/gtsam/linear/linear.i b/gtsam/linear/linear.i index 9c448de50..71eb94dd5 100644 --- a/gtsam/linear/linear.i +++ b/gtsam/linear/linear.i @@ -548,6 +548,7 @@ virtual class GaussianConditional : gtsam::JacobianFactor { bool equals(const gtsam::GaussianConditional& cg, double tol) const; // Standard Interface + double errorConstant() const; double logNormalizationConstant() const; double logProbability(const gtsam::VectorValues& x) const; double evaluate(const gtsam::VectorValues& x) const; From 6488a0ceecfc18dad6d494e8589a507c92a72419 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 22 Sep 2024 22:16:59 -0400 Subject: [PATCH 301/398] updated tests --- .../tests/testHybridGaussianConditional.cpp | 20 ++++++------ .../hybrid/tests/testHybridGaussianFactor.cpp | 5 ++- .../tests/testHybridNonlinearFactorGraph.cpp | 31 +++++++++---------- gtsam/linear/tests/testGaussianBayesNet.cpp | 4 +-- .../linear/tests/testGaussianConditional.cpp | 8 ++--- gtsam/linear/tests/testGaussianDensity.cpp | 2 +- gtsam/linear/tests/testNoiseModel.cpp | 12 +++---- python/gtsam/tests/test_HybridBayesNet.py | 3 +- 8 files changed, 41 insertions(+), 44 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index 040cd2ff0..6c1037e1d 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -180,16 +180,16 @@ TEST(HybridGaussianConditional, Error2) { // Check result. 
DiscreteKeys discrete_keys{mode}; - double logNormalizer0 = conditionals[0]->logNormalizationConstant(); - double logNormalizer1 = conditionals[1]->logNormalizationConstant(); - double minLogNormalizer = std::min(logNormalizer0, logNormalizer1); + double errorConstant0 = conditionals[0]->errorConstant(); + double errorConstant1 = conditionals[1]->errorConstant(); + double minErrorConstant = std::min(errorConstant0, errorConstant1); // Expected error is e(X) + log(sqrt(|2πΣ|)). - // We normalize log(sqrt(|2πΣ|)) with min(logNormalizers) + // We normalize log(sqrt(|2πΣ|)) with min(errorConstant) // so it is non-negative. std::vector leaves = { - conditionals[0]->error(vv) + logNormalizer0 - minLogNormalizer, - conditionals[1]->error(vv) + logNormalizer1 - minLogNormalizer}; + conditionals[0]->error(vv) + errorConstant0 - minErrorConstant, + conditionals[1]->error(vv) + errorConstant1 - minErrorConstant}; AlgebraicDecisionTree expected(discrete_keys, leaves); EXPECT(assert_equal(expected, actual, 1e-6)); @@ -198,8 +198,8 @@ TEST(HybridGaussianConditional, Error2) { for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) + - conditionals[mode]->logNormalizationConstant() - - minLogNormalizer, + conditionals[mode]->errorConstant() - + minErrorConstant, hybrid_conditional.error(hv), 1e-8); } } @@ -231,8 +231,8 @@ TEST(HybridGaussianConditional, Likelihood2) { CHECK(jf1->rows() == 2); // Check that the constant C1 is properly encoded in the JacobianFactor. 
- const double C1 = conditionals[1]->logNormalizationConstant() - - hybrid_conditional.logNormalizationConstant(); + const double C1 = hybrid_conditional.logNormalizationConstant() - + conditionals[1]->logNormalizationConstant(); const double c1 = std::sqrt(2.0 * C1); Vector expected_unwhitened(2); expected_unwhitened << 4.9 - 5.0, -c1; diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 03130bc10..d84948c75 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -780,9 +780,8 @@ static HybridGaussianFactorGraph CreateFactorGraph( // Create HybridGaussianFactor // We take negative since we want // the underlying scalar to be log(\sqrt(|2πΣ|)) - std::vector factors{ - {f0, model0->logNormalizationConstant()}, - {f1, model1->logNormalizationConstant()}}; + std::vector factors{{f0, model0->errorConstant()}, + {f1, model1->errorConstant()}}; HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors); HybridGaussianFactorGraph hfg; diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index cd9b24b37..5f1108ace 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -714,26 +714,26 @@ factor 6: P( m1 | m0 ): size: 3 conditional 0: Hybrid P( x0 | x1 m0) Discrete Keys = (m0, 2), - logNormalizationConstant: -1.38862 + logNormalizationConstant: 1.38862 Choice(m0) 0 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.85087 ] - logNormalizationConstant: -1.38862 + logNormalizationConstant: 1.38862 No noise model 1 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.95037 ] - logNormalizationConstant: -1.38862 + logNormalizationConstant: 1.38862 No noise model conditional 1: Hybrid P( x1 | x2 m0 m1) Discrete Keys = (m0, 2), (m1, 2), - logNormalizationConstant: -1.3935 + 
logNormalizationConstant: 1.3935 Choice(m1) 0 Choice(m0) @@ -741,14 +741,14 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.99901 ] - logNormalizationConstant: -1.3935 + logNormalizationConstant: 1.3935 No noise model 0 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.90098 ] - logNormalizationConstant: -1.3935 + logNormalizationConstant: 1.3935 No noise model 1 Choice(m0) @@ -756,19 +756,19 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10.098 ] - logNormalizationConstant: -1.3935 + logNormalizationConstant: 1.3935 No noise model 1 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10 ] - logNormalizationConstant: -1.3935 + logNormalizationConstant: 1.3935 No noise model conditional 2: Hybrid P( x2 | m0 m1) Discrete Keys = (m0, 2), (m1, 2), - logNormalizationConstant: -1.38857 + logNormalizationConstant: 1.38857 Choice(m1) 0 Choice(m0) @@ -777,7 +777,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.1489 ] mean: 1 elements x2: -1.0099 - logNormalizationConstant: -1.38857 + logNormalizationConstant: 1.38857 No noise model 0 1 Leaf p(x2) @@ -785,7 +785,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.1479 ] mean: 1 elements x2: -1.0098 - logNormalizationConstant: -1.38857 + logNormalizationConstant: 1.38857 No noise model 1 Choice(m0) @@ -794,7 +794,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.0504 ] mean: 1 elements x2: -1.0001 - logNormalizationConstant: -1.38857 + logNormalizationConstant: 1.38857 No noise model 1 1 Leaf p(x2) @@ -802,7 +802,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.0494 ] mean: 1 elements x2: -1 - logNormalizationConstant: -1.38857 + logNormalizationConstant: 1.38857 No noise model )"; @@ -902,9 +902,8 @@ static HybridNonlinearFactorGraph CreateFactorGraph( // Create HybridNonlinearFactor // We take negative since we want // the underlying scalar to be log(\sqrt(|2πΣ|)) - std::vector factors{ - {f0, 
model0->logNormalizationConstant()}, - {f1, model1->logNormalizationConstant()}}; + std::vector factors{{f0, model0->errorConstant()}, + {f1, model1->errorConstant()}}; HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors); diff --git a/gtsam/linear/tests/testGaussianBayesNet.cpp b/gtsam/linear/tests/testGaussianBayesNet.cpp index a186eb2b2..99453ee4e 100644 --- a/gtsam/linear/tests/testGaussianBayesNet.cpp +++ b/gtsam/linear/tests/testGaussianBayesNet.cpp @@ -76,11 +76,11 @@ TEST(GaussianBayesNet, Evaluate1) { // the normalization constant 1.0/sqrt((2*pi*Sigma).det()). // The covariance matrix inv(Sigma) = R'*R, so the determinant is const double constant = sqrt((invSigma / (2 * M_PI)).determinant()); - EXPECT_DOUBLES_EQUAL(-log(constant), + EXPECT_DOUBLES_EQUAL(log(constant), smallBayesNet.at(0)->logNormalizationConstant() + smallBayesNet.at(1)->logNormalizationConstant(), 1e-9); - EXPECT_DOUBLES_EQUAL(-log(constant), smallBayesNet.logNormalizationConstant(), + EXPECT_DOUBLES_EQUAL(log(constant), smallBayesNet.logNormalizationConstant(), 1e-9); const double actual = smallBayesNet.evaluate(mean); EXPECT_DOUBLES_EQUAL(constant, actual, 1e-9); diff --git a/gtsam/linear/tests/testGaussianConditional.cpp b/gtsam/linear/tests/testGaussianConditional.cpp index b03e0a060..26086104c 100644 --- a/gtsam/linear/tests/testGaussianConditional.cpp +++ b/gtsam/linear/tests/testGaussianConditional.cpp @@ -493,7 +493,7 @@ TEST(GaussianConditional, LogNormalizationConstant) { x.insert(X(0), Vector3::Zero()); Matrix3 Sigma = I_3x3 * sigma * sigma; double expectedLogNormalizationConstant = - -log(1 / sqrt((2 * M_PI * Sigma).determinant())); + log(1 / sqrt((2 * M_PI * Sigma).determinant())); EXPECT_DOUBLES_EQUAL(expectedLogNormalizationConstant, conditional.logNormalizationConstant(), 1e-9); @@ -517,7 +517,7 @@ TEST(GaussianConditional, Print) { " d = [ 20 40 ]\n" " mean: 1 elements\n" " x0: 20 40\n" - " logNormalizationConstant: 4.0351\n" + " logNormalizationConstant: 
-4.0351\n" "isotropic dim=2 sigma=3\n"; EXPECT(assert_print_equal(expected, conditional, "GaussianConditional")); @@ -532,7 +532,7 @@ TEST(GaussianConditional, Print) { " S[x1] = [ -1 -2 ]\n" " [ -3 -4 ]\n" " d = [ 20 40 ]\n" - " logNormalizationConstant: 4.0351\n" + " logNormalizationConstant: -4.0351\n" "isotropic dim=2 sigma=3\n"; EXPECT(assert_print_equal(expected1, conditional1, "GaussianConditional")); @@ -548,7 +548,7 @@ TEST(GaussianConditional, Print) { " S[y1] = [ -5 -6 ]\n" " [ -7 -8 ]\n" " d = [ 20 40 ]\n" - " logNormalizationConstant: 4.0351\n" + " logNormalizationConstant: -4.0351\n" "isotropic dim=2 sigma=3\n"; EXPECT(assert_print_equal(expected2, conditional2, "GaussianConditional")); } diff --git a/gtsam/linear/tests/testGaussianDensity.cpp b/gtsam/linear/tests/testGaussianDensity.cpp index 3226f40ab..e88fd8cc4 100644 --- a/gtsam/linear/tests/testGaussianDensity.cpp +++ b/gtsam/linear/tests/testGaussianDensity.cpp @@ -55,7 +55,7 @@ TEST(GaussianDensity, FromMeanAndStddev) { double expected1 = 0.5 * e.dot(e); EXPECT_DOUBLES_EQUAL(expected1, density.error(values), 1e-9); - double expected2 = -(density.logNormalizationConstant() + 0.5 * e.dot(e)); + double expected2 = -(density.errorConstant() + 0.5 * e.dot(e)); EXPECT_DOUBLES_EQUAL(expected2, density.logProbability(values), 1e-9); } diff --git a/gtsam/linear/tests/testNoiseModel.cpp b/gtsam/linear/tests/testNoiseModel.cpp index 59ee05d07..5e756a483 100644 --- a/gtsam/linear/tests/testNoiseModel.cpp +++ b/gtsam/linear/tests/testNoiseModel.cpp @@ -810,9 +810,9 @@ TEST(NoiseModel, NonDiagonalGaussian) TEST(NoiseModel, LogNormalizationConstant1D) { // Very simple 1D noise model, which we can compute by hand. double sigma = 0.1; - // For expected values, we compute -log(1/sqrt(|2πΣ|)) by hand. - // = 0.5*(log(2π) + log(Σ)) (since it is 1D) - double expected_value = 0.5 * log(2 * M_PI * sigma * sigma); + // For expected values, we compute log(1/sqrt(|2πΣ|)) by hand. 
+ // = -0.5*(log(2π) + log(Σ)) (since it is 1D) + double expected_value = -0.5 * log(2 * M_PI * sigma * sigma); // Gaussian { @@ -839,7 +839,7 @@ TEST(NoiseModel, LogNormalizationConstant1D) { auto noise_model = Unit::Create(1); double actual_value = noise_model->logNormalizationConstant(); double sigma = 1.0; - expected_value = 0.5 * log(2 * M_PI * sigma * sigma); + expected_value = -0.5 * log(2 * M_PI * sigma * sigma); EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); } } @@ -850,7 +850,7 @@ TEST(NoiseModel, LogNormalizationConstant3D) { size_t n = 3; // We compute the expected values just like in the LogNormalizationConstant1D // test, but we multiply by 3 due to the determinant. - double expected_value = 0.5 * n * log(2 * M_PI * sigma * sigma); + double expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma); // Gaussian { @@ -879,7 +879,7 @@ TEST(NoiseModel, LogNormalizationConstant3D) { auto noise_model = Unit::Create(3); double actual_value = noise_model->logNormalizationConstant(); double sigma = 1.0; - expected_value = 0.5 * n * log(2 * M_PI * sigma * sigma); + expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma); EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); } } diff --git a/python/gtsam/tests/test_HybridBayesNet.py b/python/gtsam/tests/test_HybridBayesNet.py index bb12ff02b..a72e34062 100644 --- a/python/gtsam/tests/test_HybridBayesNet.py +++ b/python/gtsam/tests/test_HybridBayesNet.py @@ -90,8 +90,7 @@ class TestHybridBayesNet(GtsamTestCase): self.assertTrue(probability >= 0.0) logProb = conditional.logProbability(values) self.assertAlmostEqual(probability, np.exp(logProb)) - expected = -(conditional.logNormalizationConstant() + \ - conditional.error(values)) + expected = -(conditional.errorConstant() + conditional.error(values)) self.assertAlmostEqual(logProb, expected) From 0e676525688e6ff968d46c486dba5aa10103a66a Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 22 Sep 2024 23:04:39 -0400 Subject: [PATCH 302/398] wrap 
errorConstant for HybridConditional --- gtsam/hybrid/hybrid.i | 1 + 1 file changed, 1 insertion(+) diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index ef179eb9d..81cca3c46 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -61,6 +61,7 @@ virtual class HybridConditional { size_t nrParents() const; // Standard interface: + double errorConstant() const; double logNormalizationConstant() const; double logProbability(const gtsam::HybridValues& values) const; double evaluate(const gtsam::HybridValues& values) const; From 6fd8da829baba9e94d798e965cfaa1ee27544df5 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 22 Sep 2024 23:07:47 -0400 Subject: [PATCH 303/398] wrap errorConstant for DiscreteConditional --- gtsam/discrete/discrete.i | 1 + 1 file changed, 1 insertion(+) diff --git a/gtsam/discrete/discrete.i b/gtsam/discrete/discrete.i index 0bdebd0e1..753c3527a 100644 --- a/gtsam/discrete/discrete.i +++ b/gtsam/discrete/discrete.i @@ -112,6 +112,7 @@ virtual class DiscreteConditional : gtsam::DecisionTreeFactor { const std::vector& table); // Standard interface + double errorConstant() const; double logNormalizationConstant() const; double logProbability(const gtsam::DiscreteValues& values) const; double evaluate(const gtsam::DiscreteValues& values) const; From aae5f9e0405d3364bbd5cc875b80e325f71a79fc Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 22 Sep 2024 23:15:00 -0400 Subject: [PATCH 304/398] fix numpy deprecation --- python/gtsam/tests/test_NonlinearOptimizer.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/python/gtsam/tests/test_NonlinearOptimizer.py b/python/gtsam/tests/test_NonlinearOptimizer.py index a47c5ad62..1b4209509 100644 --- a/python/gtsam/tests/test_NonlinearOptimizer.py +++ b/python/gtsam/tests/test_NonlinearOptimizer.py @@ -14,15 +14,16 @@ from __future__ import print_function import unittest -import gtsam -from gtsam import ( - DoglegOptimizer, DoglegParams, 
DummyPreconditionerParameters, GaussNewtonOptimizer, - GaussNewtonParams, GncLMParams, GncLossType, GncLMOptimizer, LevenbergMarquardtOptimizer, - LevenbergMarquardtParams, NonlinearFactorGraph, Ordering, PCGSolverParameters, Point2, - PriorFactorPoint2, Values -) from gtsam.utils.test_case import GtsamTestCase +import gtsam +from gtsam import (DoglegOptimizer, DoglegParams, + DummyPreconditionerParameters, GaussNewtonOptimizer, + GaussNewtonParams, GncLMOptimizer, GncLMParams, GncLossType, + LevenbergMarquardtOptimizer, LevenbergMarquardtParams, + NonlinearFactorGraph, Ordering, PCGSolverParameters, Point2, + PriorFactorPoint2, Values) + KEY1 = 1 KEY2 = 2 @@ -136,7 +137,7 @@ class TestScenario(GtsamTestCase): # Test optimizer params optimizer = GncLMOptimizer(self.fg, self.initial_values, params) for ict_factor in (0.9, 1.1): - new_ict = ict_factor * optimizer.getInlierCostThresholds() + new_ict = ict_factor * optimizer.getInlierCostThresholds().item() optimizer.setInlierCostThresholds(new_ict) self.assertAlmostEqual(optimizer.getInlierCostThresholds(), new_ict) for w_factor in (0.8, 0.9): From e09344c6ba5ab8c779eec624422fca56657c6d35 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 23 Sep 2024 03:37:09 -0400 Subject: [PATCH 305/398] replace errorConstant with negLogConstant --- gtsam/discrete/DiscreteConditional.cpp | 2 +- gtsam/discrete/DiscreteConditional.h | 8 ++++---- gtsam/discrete/discrete.i | 2 +- gtsam/hybrid/HybridConditional.cpp | 10 +++++----- gtsam/hybrid/HybridConditional.h | 4 ++-- gtsam/hybrid/HybridGaussianConditional.cpp | 8 ++++---- gtsam/hybrid/HybridGaussianConditional.h | 2 +- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 6 +++--- gtsam/hybrid/hybrid.i | 2 +- .../hybrid/tests/testHybridGaussianConditional.cpp | 14 +++++++------- gtsam/hybrid/tests/testHybridGaussianFactor.cpp | 4 ++-- .../tests/testHybridNonlinearFactorGraph.cpp | 4 ++-- gtsam/inference/Conditional-inst.h | 8 ++++---- gtsam/inference/Conditional.h | 2 +- 
gtsam/linear/GaussianConditional.cpp | 2 +- gtsam/linear/GaussianConditional.h | 2 +- gtsam/linear/NoiseModel.cpp | 4 ++-- gtsam/linear/NoiseModel.h | 2 +- gtsam/linear/linear.i | 2 +- gtsam/linear/tests/testGaussianDensity.cpp | 2 +- python/gtsam/tests/test_HybridBayesNet.py | 2 +- 21 files changed, 46 insertions(+), 46 deletions(-) diff --git a/gtsam/discrete/DiscreteConditional.cpp b/gtsam/discrete/DiscreteConditional.cpp index 3e74cebb4..3f0c9f511 100644 --- a/gtsam/discrete/DiscreteConditional.cpp +++ b/gtsam/discrete/DiscreteConditional.cpp @@ -467,7 +467,7 @@ double DiscreteConditional::evaluate(const HybridValues& x) const { } /* ************************************************************************* */ -double DiscreteConditional::errorConstant() const { +double DiscreteConditional::negLogConstant() const { return 0.0; } diff --git a/gtsam/discrete/DiscreteConditional.h b/gtsam/discrete/DiscreteConditional.h index 53331a0be..e16100d0a 100644 --- a/gtsam/discrete/DiscreteConditional.h +++ b/gtsam/discrete/DiscreteConditional.h @@ -264,12 +264,12 @@ class GTSAM_EXPORT DiscreteConditional } /** - * errorConstant is just zero, such that - * logProbability(x) = log(evaluate(x)) = - error(x) - * and hence error(x) = - log(evaluate(x)) > 0 for all x. + * negLogConstant is just zero, such that + * -logProbability(x) = -log(evaluate(x)) = error(x) + * and hence error(x) > 0 for all x. * Thus -log(K) for the normalization constant k is 0. 
*/ - double errorConstant() const override; + double negLogConstant() const override; /// @} diff --git a/gtsam/discrete/discrete.i b/gtsam/discrete/discrete.i index 753c3527a..ab3ff75ca 100644 --- a/gtsam/discrete/discrete.i +++ b/gtsam/discrete/discrete.i @@ -112,7 +112,7 @@ virtual class DiscreteConditional : gtsam::DecisionTreeFactor { const std::vector& table); // Standard interface - double errorConstant() const; + double negLogConstant() const; double logNormalizationConstant() const; double logProbability(const gtsam::DiscreteValues& values) const; double evaluate(const gtsam::DiscreteValues& values) const; diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index 57d8bba39..cac2adcf8 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -161,18 +161,18 @@ double HybridConditional::logProbability(const HybridValues &values) const { } /* ************************************************************************ */ -double HybridConditional::errorConstant() const { +double HybridConditional::negLogConstant() const { if (auto gc = asGaussian()) { - return gc->errorConstant(); + return gc->negLogConstant(); } if (auto gm = asHybrid()) { - return gm->errorConstant(); // 0.0! + return gm->negLogConstant(); // 0.0! } if (auto dc = asDiscrete()) { - return dc->errorConstant(); // 0.0! + return dc->negLogConstant(); // 0.0! } throw std::runtime_error( - "HybridConditional::errorConstant: conditional type not handled"); + "HybridConditional::negLogConstant: conditional type not handled"); } /* ************************************************************************ */ diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index 92118385c..588c44e0b 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -194,11 +194,11 @@ class GTSAM_EXPORT HybridConditional /** * Return the negative log of the normalization constant. 
- * This shows up in the error as -(error(x) + errorConstant) + * This shows up in the error as -(error(x) + negLogConstant) * Note this is 0.0 for discrete and hybrid conditionals, but depends * on the continuous parameters for Gaussian conditionals. */ - double errorConstant() const override; + double negLogConstant() const override; /// Return the probability (or density) of the underlying conditional. double evaluate(const HybridValues& values) const override; diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index f6cd3e5f7..f1220505d 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -36,7 +36,7 @@ HybridGaussianFactor::FactorValuePairs GetFactorValuePairs( // Check if conditional is pruned if (conditional) { // Assign log(\sqrt(|2πΣ|)) = -log(1 / sqrt(|2πΣ|)) - value = conditional->errorConstant(); + value = conditional->negLogConstant(); } return {std::dynamic_pointer_cast(conditional), value}; }; @@ -58,7 +58,7 @@ HybridGaussianConditional::HybridGaussianConditional( [this](const GaussianConditional::shared_ptr &conditional) { if (conditional) { this->logConstant_ = - std::min(this->logConstant_, conditional->errorConstant()); + std::min(this->logConstant_, conditional->negLogConstant()); } }); } @@ -84,7 +84,7 @@ GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree() auto wrap = [this](const GaussianConditional::shared_ptr &gc) { // First check if conditional has not been pruned if (gc) { - const double Cgm_Kgcm = gc->errorConstant() - this->logConstant_; + const double Cgm_Kgcm = gc->negLogConstant() - this->logConstant_; // If there is a difference in the covariances, we need to account for // that since the error is dependent on the mode. 
if (Cgm_Kgcm > 0.0) { @@ -214,7 +214,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( [&](const GaussianConditional::shared_ptr &conditional) -> GaussianFactorValuePair { const auto likelihood_m = conditional->likelihood(given); - const double Cgm_Kgcm = conditional->errorConstant() - logConstant_; + const double Cgm_Kgcm = conditional->negLogConstant() - logConstant_; if (Cgm_Kgcm == 0.0) { return {likelihood_m, 0.0}; } else { diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 8851c7741..8566d47ef 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -158,7 +158,7 @@ class GTSAM_EXPORT HybridGaussianConditional * * @return double */ - inline double errorConstant() const override { return logConstant_; } + inline double negLogConstant() const override { return logConstant_; } /** * Create a likelihood factor for a hybrid Gaussian conditional, diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index a6c15d1fd..603b29fb5 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -329,9 +329,9 @@ static std::shared_ptr createDiscreteFactor( // Logspace version of: // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); - // errorConstant gives `-log(k)` + // negLogConstant gives `-log(k)` // which is `-log(k) = log(1/k) = log(\sqrt{|2πΣ|})`. 
- return -factor->error(kEmpty) + conditional->errorConstant(); + return -factor->error(kEmpty) + conditional->negLogConstant(); }; AlgebraicDecisionTree logProbabilities( @@ -357,7 +357,7 @@ static std::shared_ptr createHybridGaussianFactor( // Add 2.0 term since the constant term will be premultiplied by 0.5 // as per the Hessian definition, // and negative since we want log(k) - hf->constantTerm() += -2.0 * conditional->errorConstant(); + hf->constantTerm() += -2.0 * conditional->negLogConstant(); } return {factor, 0.0}; }; diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 81cca3c46..cdeb35046 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -61,7 +61,7 @@ virtual class HybridConditional { size_t nrParents() const; // Standard interface: - double errorConstant() const; + double negLogConstant() const; double logNormalizationConstant() const; double logProbability(const gtsam::HybridValues& values) const; double evaluate(const gtsam::HybridValues& values) const; diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index 6c1037e1d..74f57a740 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -180,16 +180,16 @@ TEST(HybridGaussianConditional, Error2) { // Check result. DiscreteKeys discrete_keys{mode}; - double errorConstant0 = conditionals[0]->errorConstant(); - double errorConstant1 = conditionals[1]->errorConstant(); - double minErrorConstant = std::min(errorConstant0, errorConstant1); + double negLogConstant0 = conditionals[0]->negLogConstant(); + double negLogConstant1 = conditionals[1]->negLogConstant(); + double minErrorConstant = std::min(negLogConstant0, negLogConstant1); // Expected error is e(X) + log(sqrt(|2πΣ|)). - // We normalize log(sqrt(|2πΣ|)) with min(errorConstant) + // We normalize log(sqrt(|2πΣ|)) with min(negLogConstant) // so it is non-negative. 
std::vector leaves = { - conditionals[0]->error(vv) + errorConstant0 - minErrorConstant, - conditionals[1]->error(vv) + errorConstant1 - minErrorConstant}; + conditionals[0]->error(vv) + negLogConstant0 - minErrorConstant, + conditionals[1]->error(vv) + negLogConstant1 - minErrorConstant}; AlgebraicDecisionTree expected(discrete_keys, leaves); EXPECT(assert_equal(expected, actual, 1e-6)); @@ -198,7 +198,7 @@ TEST(HybridGaussianConditional, Error2) { for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) + - conditionals[mode]->errorConstant() - + conditionals[mode]->negLogConstant() - minErrorConstant, hybrid_conditional.error(hv), 1e-8); } diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index d84948c75..4b664b8b4 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -780,8 +780,8 @@ static HybridGaussianFactorGraph CreateFactorGraph( // Create HybridGaussianFactor // We take negative since we want // the underlying scalar to be log(\sqrt(|2πΣ|)) - std::vector factors{{f0, model0->errorConstant()}, - {f1, model1->errorConstant()}}; + std::vector factors{{f0, model0->negLogConstant()}, + {f1, model1->negLogConstant()}}; HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors); HybridGaussianFactorGraph hfg; diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 5f1108ace..495444c79 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -902,8 +902,8 @@ static HybridNonlinearFactorGraph CreateFactorGraph( // Create HybridNonlinearFactor // We take negative since we want // the underlying scalar to be log(\sqrt(|2πΣ|)) - std::vector factors{{f0, model0->errorConstant()}, - {f1, model1->errorConstant()}}; + std::vector 
factors{{f0, model0->negLogConstant()}, + {f1, model1->negLogConstant()}}; HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors); diff --git a/gtsam/inference/Conditional-inst.h b/gtsam/inference/Conditional-inst.h index ba42a9a24..32fae188b 100644 --- a/gtsam/inference/Conditional-inst.h +++ b/gtsam/inference/Conditional-inst.h @@ -59,17 +59,17 @@ double Conditional::evaluate( /* ************************************************************************* */ template -double Conditional::errorConstant() +double Conditional::negLogConstant() const { throw std::runtime_error( - "Conditional::errorConstant is not implemented"); + "Conditional::negLogConstant is not implemented"); } /* ************************************************************************* */ template double Conditional::logNormalizationConstant() const { - return -errorConstant(); + return -negLogConstant(); } /* ************************************************************************* */ @@ -96,7 +96,7 @@ bool Conditional::CheckInvariants( // normalization constant const double error = conditional.error(values); if (error < 0.0) return false; // prob_or_density is negative. 
- const double expected = -(conditional.errorConstant() + error); + const double expected = -(conditional.negLogConstant() + error); if (std::abs(logProb - expected) > 1e-9) return false; // logProb is not consistent with error return true; diff --git a/gtsam/inference/Conditional.h b/gtsam/inference/Conditional.h index d09341489..c44ed239e 100644 --- a/gtsam/inference/Conditional.h +++ b/gtsam/inference/Conditional.h @@ -169,7 +169,7 @@ namespace gtsam { * * @return double */ - virtual double errorConstant() const; + virtual double negLogConstant() const; /** * All conditional types need to implement a log normalization constant to diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index ca4a1ed33..7508524c3 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -182,7 +182,7 @@ namespace gtsam { /* ************************************************************************* */ // normalization constant = 1.0 / sqrt((2*pi)^n*det(Sigma)) // neg-log = 0.5 * n*log(2*pi) + 0.5 * log det(Sigma) - double GaussianConditional::errorConstant() const { + double GaussianConditional::negLogConstant() const { constexpr double log2pi = 1.8378770664093454835606594728112; size_t n = d().size(); // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1} diff --git a/gtsam/linear/GaussianConditional.h b/gtsam/linear/GaussianConditional.h index 31f3797d5..27270fece 100644 --- a/gtsam/linear/GaussianConditional.h +++ b/gtsam/linear/GaussianConditional.h @@ -140,7 +140,7 @@ namespace gtsam { * * @return double */ - double errorConstant() const override; + double negLogConstant() const override; /** * Calculate log-probability log(evaluate(x)) for given values `x`: diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index 39901584b..68c2ffda7 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -255,7 +255,7 @@ double Gaussian::logDeterminant() const { } /* 
*******************************************************************************/ -double Gaussian::errorConstant() const { +double Gaussian::negLogConstant() const { // log(det(Sigma)) = -2.0 * logDetR // which gives neg-log = 0.5*n*log(2*pi) + 0.5*(-2.0 * logDetR()) // = 0.5*n*log(2*pi) - (0.5*2.0 * logDetR()) @@ -268,7 +268,7 @@ double Gaussian::errorConstant() const { /* *******************************************************************************/ double Gaussian::logNormalizationConstant() const { - return -errorConstant(); + return -negLogConstant(); } /* ************************************************************************* */ diff --git a/gtsam/linear/NoiseModel.h b/gtsam/linear/NoiseModel.h index 851dc3c1f..5ad48cade 100644 --- a/gtsam/linear/NoiseModel.h +++ b/gtsam/linear/NoiseModel.h @@ -277,7 +277,7 @@ namespace gtsam { * * @return double */ - double errorConstant() const; + double negLogConstant() const; /** * @brief Method to compute the normalization constant diff --git a/gtsam/linear/linear.i b/gtsam/linear/linear.i index 71eb94dd5..23b6fb3f6 100644 --- a/gtsam/linear/linear.i +++ b/gtsam/linear/linear.i @@ -548,7 +548,7 @@ virtual class GaussianConditional : gtsam::JacobianFactor { bool equals(const gtsam::GaussianConditional& cg, double tol) const; // Standard Interface - double errorConstant() const; + double negLogConstant() const; double logNormalizationConstant() const; double logProbability(const gtsam::VectorValues& x) const; double evaluate(const gtsam::VectorValues& x) const; diff --git a/gtsam/linear/tests/testGaussianDensity.cpp b/gtsam/linear/tests/testGaussianDensity.cpp index e88fd8cc4..b2861e833 100644 --- a/gtsam/linear/tests/testGaussianDensity.cpp +++ b/gtsam/linear/tests/testGaussianDensity.cpp @@ -55,7 +55,7 @@ TEST(GaussianDensity, FromMeanAndStddev) { double expected1 = 0.5 * e.dot(e); EXPECT_DOUBLES_EQUAL(expected1, density.error(values), 1e-9); - double expected2 = -(density.errorConstant() + 0.5 * e.dot(e)); + double 
expected2 = -(density.negLogConstant() + 0.5 * e.dot(e)); EXPECT_DOUBLES_EQUAL(expected2, density.logProbability(values), 1e-9); } diff --git a/python/gtsam/tests/test_HybridBayesNet.py b/python/gtsam/tests/test_HybridBayesNet.py index a72e34062..bf2b6a033 100644 --- a/python/gtsam/tests/test_HybridBayesNet.py +++ b/python/gtsam/tests/test_HybridBayesNet.py @@ -90,7 +90,7 @@ class TestHybridBayesNet(GtsamTestCase): self.assertTrue(probability >= 0.0) logProb = conditional.logProbability(values) self.assertAlmostEqual(probability, np.exp(logProb)) - expected = -(conditional.errorConstant() + conditional.error(values)) + expected = -(conditional.negLogConstant() + conditional.error(values)) self.assertAlmostEqual(logProb, expected) From e95b8be014e245f4300eddb86c1dd400cf37f9e6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 23 Sep 2024 14:54:53 -0400 Subject: [PATCH 306/398] kill logNormalizationConstant in favor of negLogConstant --- gtsam/discrete/discrete.i | 1 - gtsam/hybrid/HybridGaussianConditional.cpp | 2 +- gtsam/hybrid/hybrid.i | 1 - .../tests/testHybridGaussianConditional.cpp | 14 ++++---- gtsam/inference/Conditional-inst.h | 19 +++-------- gtsam/inference/Conditional.h | 17 ++++------ gtsam/linear/GaussianBayesNet.cpp | 21 ++++++------ gtsam/linear/GaussianBayesNet.h | 6 ++-- gtsam/linear/GaussianConditional.cpp | 4 +-- gtsam/linear/NoiseModel.cpp | 5 --- gtsam/linear/NoiseModel.h | 10 +----- gtsam/linear/linear.i | 1 - gtsam/linear/tests/testGaussianBayesNet.cpp | 9 +++-- .../linear/tests/testGaussianConditional.cpp | 10 +++--- gtsam/linear/tests/testNoiseModel.cpp | 34 +++++++++---------- 15 files changed, 61 insertions(+), 93 deletions(-) diff --git a/gtsam/discrete/discrete.i b/gtsam/discrete/discrete.i index ab3ff75ca..55c8f9e22 100644 --- a/gtsam/discrete/discrete.i +++ b/gtsam/discrete/discrete.i @@ -113,7 +113,6 @@ virtual class DiscreteConditional : gtsam::DecisionTreeFactor { // Standard interface double negLogConstant() const; - double 
logNormalizationConstant() const; double logProbability(const gtsam::DiscreteValues& values) const; double evaluate(const gtsam::DiscreteValues& values) const; double error(const gtsam::DiscreteValues& values) const; diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index f1220505d..a7ed9e939 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -155,7 +155,7 @@ void HybridGaussianConditional::print(const std::string &s, std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), "; } std::cout << std::endl - << " logNormalizationConstant: " << logNormalizationConstant() + << " logNormalizationConstant: " << -negLogConstant() << std::endl << std::endl; conditionals_.print( diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index cdeb35046..82881ac47 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -62,7 +62,6 @@ virtual class HybridConditional { // Standard interface: double negLogConstant() const; - double logNormalizationConstant() const; double logProbability(const gtsam::HybridValues& values) const; double evaluate(const gtsam::HybridValues& values) const; double operator()(const gtsam::HybridValues& values) const; diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index 74f57a740..adcf6c70f 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -61,11 +61,11 @@ const HybridGaussianConditional hybrid_conditional({Z(0)}, {X(0)}, mode, TEST(HybridGaussianConditional, Invariants) { using namespace equal_constants; - // Check that the conditional normalization constant is the max of all - // constants which are all equal, in this case, hence: - const double K = hybrid_conditional.logNormalizationConstant(); - EXPECT_DOUBLES_EQUAL(K, conditionals[0]->logNormalizationConstant(), 1e-8); - 
EXPECT_DOUBLES_EQUAL(K, conditionals[1]->logNormalizationConstant(), 1e-8); + // Check that the conditional (negative log) normalization constant is the min + // of all constants which are all equal, in this case, hence: + const double K = hybrid_conditional.negLogConstant(); + EXPECT_DOUBLES_EQUAL(K, conditionals[0]->negLogConstant(), 1e-8); + EXPECT_DOUBLES_EQUAL(K, conditionals[1]->negLogConstant(), 1e-8); EXPECT(HybridGaussianConditional::CheckInvariants(hybrid_conditional, hv0)); EXPECT(HybridGaussianConditional::CheckInvariants(hybrid_conditional, hv1)); @@ -231,8 +231,8 @@ TEST(HybridGaussianConditional, Likelihood2) { CHECK(jf1->rows() == 2); // Check that the constant C1 is properly encoded in the JacobianFactor. - const double C1 = hybrid_conditional.logNormalizationConstant() - - conditionals[1]->logNormalizationConstant(); + const double C1 = + conditionals[1]->negLogConstant() - hybrid_conditional.negLogConstant(); const double c1 = std::sqrt(2.0 * C1); Vector expected_unwhitened(2); expected_unwhitened << 4.9 - 5.0, -c1; diff --git a/gtsam/inference/Conditional-inst.h b/gtsam/inference/Conditional-inst.h index 32fae188b..ca4d0981f 100644 --- a/gtsam/inference/Conditional-inst.h +++ b/gtsam/inference/Conditional-inst.h @@ -59,23 +59,14 @@ double Conditional::evaluate( /* ************************************************************************* */ template -double Conditional::negLogConstant() - const { - throw std::runtime_error( - "Conditional::negLogConstant is not implemented"); -} - -/* ************************************************************************* */ -template -double Conditional::logNormalizationConstant() - const { - return -negLogConstant(); +double Conditional::negLogConstant() const { + throw std::runtime_error("Conditional::negLogConstant is not implemented"); } /* ************************************************************************* */ template double Conditional::normalizationConstant() const { - return 
std::exp(logNormalizationConstant()); + return std::exp(-negLogConstant()); } /* ************************************************************************* */ @@ -90,8 +81,8 @@ bool Conditional::CheckInvariants( const double logProb = conditional.logProbability(values); if (std::abs(prob_or_density - std::exp(logProb)) > 1e-9) return false; // logProb is not consistent with prob_or_density - if (std::abs(conditional.logNormalizationConstant() - - std::log(conditional.normalizationConstant())) > 1e-9) + if (std::abs(conditional.negLogConstant() - + (-std::log(conditional.normalizationConstant()))) > 1e-9) return false; // log normalization constant is not consistent with // normalization constant const double error = conditional.error(values); diff --git a/gtsam/inference/Conditional.h b/gtsam/inference/Conditional.h index c44ed239e..f37a1b7a4 100644 --- a/gtsam/inference/Conditional.h +++ b/gtsam/inference/Conditional.h @@ -39,7 +39,8 @@ namespace gtsam { * logProbability(x) = -(K + error(x)) * i.e., K = -log(k). The normalization constant k is assumed to *not* depend * on any argument, only (possibly) on the conditional parameters. - * This class provides a default logNormalizationConstant() == 0.0. + * This class provides a default negative log normalization constant + * negLogConstant() == 0.0. * * There are four broad classes of conditionals that derive from Conditional: * @@ -165,19 +166,13 @@ namespace gtsam { /** * @brief All conditional types need to implement this as the negative log - * of the normalization constant. + * of the normalization constant to make it such that error>=0. * * @return double */ virtual double negLogConstant() const; - /** - * All conditional types need to implement a log normalization constant to - * make it such that error>=0. - */ - virtual double logNormalizationConstant() const; - - /** Non-virtual, exponentiate logNormalizationConstant. */ + /** Non-virtual, negate and exponentiate negLogConstant. 
*/ double normalizationConstant() const; /// @} @@ -217,9 +212,9 @@ namespace gtsam { * - evaluate >= 0.0 * - evaluate(x) == conditional(x) * - exp(logProbability(x)) == evaluate(x) - * - logNormalizationConstant() = log(normalizationConstant()) + * - negLogConstant() = -log(normalizationConstant()) * - error >= 0.0 - * - logProbability(x) == logNormalizationConstant() - error(x) + * - logProbability(x) == -(negLogConstant() + error(x)) * * @param conditional The conditional to test, as a reference to the derived type. * @tparam VALUES HybridValues, or a more narrow type like DiscreteValues. diff --git a/gtsam/linear/GaussianBayesNet.cpp b/gtsam/linear/GaussianBayesNet.cpp index 978475bcb..f8c4ddc4c 100644 --- a/gtsam/linear/GaussianBayesNet.cpp +++ b/gtsam/linear/GaussianBayesNet.cpp @@ -243,25 +243,24 @@ namespace gtsam { } /* ************************************************************************* */ - double GaussianBayesNet::logNormalizationConstant() const { + double GaussianBayesNet::negLogConstant() const { /* normalization constant = 1.0 / sqrt((2*pi)^n*det(Sigma)) - logConstant = -log(normalizationConstant) - = -0.5 * n*log(2*pi) - 0.5 * log(det(Sigma)) + negLogConstant = -log(normalizationConstant) + = 0.5 * n*log(2*pi) + 0.5 * log(det(Sigma)) log(det(Sigma)) = -2.0 * logDeterminant() - thus, logConstant = -0.5*n*log(2*pi) + logDeterminant() + thus, negLogConstant = 0.5*n*log(2*pi) - logDeterminant() - BayesNet logConstant = sum(-0.5*n_i*log(2*pi) + logDeterminant_i()) - = sum(-0.5*n_i*log(2*pi)) + sum(logDeterminant_i()) - = sum(-0.5*n_i*log(2*pi)) + bn->logDeterminant() - = sum(logNormalizationConstant_i) + BayesNet negLogConstant = sum(0.5*n_i*log(2*pi) - logDeterminant_i()) + = sum(0.5*n_i*log(2*pi)) + sum(logDeterminant_i()) + = sum(0.5*n_i*log(2*pi)) + bn->logDeterminant() */ - double logNormConst = 0.0; + double negLogNormConst = 0.0; for (const sharedConditional& cg : *this) { - logNormConst += cg->logNormalizationConstant(); + negLogNormConst 
+= cg->negLogConstant(); } - return logNormConst; + return negLogNormConst; } /* ************************************************************************* */ diff --git a/gtsam/linear/GaussianBayesNet.h b/gtsam/linear/GaussianBayesNet.h index ea1cb8603..e858bbe43 100644 --- a/gtsam/linear/GaussianBayesNet.h +++ b/gtsam/linear/GaussianBayesNet.h @@ -235,12 +235,12 @@ namespace gtsam { double logDeterminant() const; /** - * @brief Get the log of the normalization constant corresponding to the - * joint Gaussian density represented by this Bayes net. + * @brief Get the negative log of the normalization constant corresponding + * to the joint Gaussian density represented by this Bayes net. * * @return double */ - double logNormalizationConstant() const; + double negLogConstant() const; /** * Backsubstitute with a different RHS vector than the one stored in this BayesNet. diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index 7508524c3..7735e5129 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -121,7 +121,7 @@ namespace gtsam { const auto mean = solve({}); // solve for mean. 
mean.print(" mean", formatter); } - cout << " logNormalizationConstant: " << logNormalizationConstant() << endl; + cout << " logNormalizationConstant: " << -negLogConstant() << endl; if (model_) model_->print(" Noise model: "); else @@ -198,7 +198,7 @@ namespace gtsam { // density = k exp(-error(x)) // log = log(k) - error(x) double GaussianConditional::logProbability(const VectorValues& x) const { - return logNormalizationConstant() - error(x); + return -(negLogConstant() + error(x)); } double GaussianConditional::logProbability(const HybridValues& x) const { diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index 68c2ffda7..1236f1b94 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -266,11 +266,6 @@ double Gaussian::negLogConstant() const { return 0.5 * n * log2pi - logDetR(); } -/* *******************************************************************************/ -double Gaussian::logNormalizationConstant() const { - return -negLogConstant(); -} - /* ************************************************************************* */ // Diagonal /* ************************************************************************* */ diff --git a/gtsam/linear/NoiseModel.h b/gtsam/linear/NoiseModel.h index 5ad48cade..34fa63e4c 100644 --- a/gtsam/linear/NoiseModel.h +++ b/gtsam/linear/NoiseModel.h @@ -273,20 +273,12 @@ namespace gtsam { /** * @brief Compute the negative log of the normalization constant - * for a Gaussian noise model k = \sqrt(1/|2πΣ|). + * for a Gaussian noise model k = 1/\sqrt(|2πΣ|). * * @return double */ double negLogConstant() const; - /** - * @brief Method to compute the normalization constant - * for a Gaussian noise model k = \sqrt(1/|2πΣ|). 
- * - * @return double - */ - double logNormalizationConstant() const; - private: #ifdef GTSAM_ENABLE_BOOST_SERIALIZATION /** Serialization function */ diff --git a/gtsam/linear/linear.i b/gtsam/linear/linear.i index 23b6fb3f6..af6c2ee22 100644 --- a/gtsam/linear/linear.i +++ b/gtsam/linear/linear.i @@ -549,7 +549,6 @@ virtual class GaussianConditional : gtsam::JacobianFactor { // Standard Interface double negLogConstant() const; - double logNormalizationConstant() const; double logProbability(const gtsam::VectorValues& x) const; double evaluate(const gtsam::VectorValues& x) const; double error(const gtsam::VectorValues& x) const; diff --git a/gtsam/linear/tests/testGaussianBayesNet.cpp b/gtsam/linear/tests/testGaussianBayesNet.cpp index 99453ee4e..b1cfbe12c 100644 --- a/gtsam/linear/tests/testGaussianBayesNet.cpp +++ b/gtsam/linear/tests/testGaussianBayesNet.cpp @@ -76,12 +76,11 @@ TEST(GaussianBayesNet, Evaluate1) { // the normalization constant 1.0/sqrt((2*pi*Sigma).det()). // The covariance matrix inv(Sigma) = R'*R, so the determinant is const double constant = sqrt((invSigma / (2 * M_PI)).determinant()); - EXPECT_DOUBLES_EQUAL(log(constant), - smallBayesNet.at(0)->logNormalizationConstant() + - smallBayesNet.at(1)->logNormalizationConstant(), - 1e-9); - EXPECT_DOUBLES_EQUAL(log(constant), smallBayesNet.logNormalizationConstant(), + EXPECT_DOUBLES_EQUAL(-log(constant), + smallBayesNet.at(0)->negLogConstant() + + smallBayesNet.at(1)->negLogConstant(), 1e-9); + EXPECT_DOUBLES_EQUAL(-log(constant), smallBayesNet.negLogConstant(), 1e-9); const double actual = smallBayesNet.evaluate(mean); EXPECT_DOUBLES_EQUAL(constant, actual, 1e-9); } diff --git a/gtsam/linear/tests/testGaussianConditional.cpp b/gtsam/linear/tests/testGaussianConditional.cpp index 26086104c..68894c314 100644 --- a/gtsam/linear/tests/testGaussianConditional.cpp +++ b/gtsam/linear/tests/testGaussianConditional.cpp @@ -486,17 +486,17 @@ TEST(GaussianConditional, Error) { /* 
************************************************************************* */ // Similar test for multivariate gaussian but with sigma 2.0 -TEST(GaussianConditional, LogNormalizationConstant) { +TEST(GaussianConditional, NegLogConstant) { double sigma = 2.0; auto conditional = GaussianConditional::FromMeanAndStddev(X(0), Vector3::Zero(), sigma); VectorValues x; x.insert(X(0), Vector3::Zero()); Matrix3 Sigma = I_3x3 * sigma * sigma; - double expectedLogNormalizationConstant = - log(1 / sqrt((2 * M_PI * Sigma).determinant())); + double expectedNegLogConstant = + -log(1 / sqrt((2 * M_PI * Sigma).determinant())); - EXPECT_DOUBLES_EQUAL(expectedLogNormalizationConstant, - conditional.logNormalizationConstant(), 1e-9); + EXPECT_DOUBLES_EQUAL(expectedNegLogConstant, conditional.negLogConstant(), + 1e-9); } /* ************************************************************************* */ diff --git a/gtsam/linear/tests/testNoiseModel.cpp b/gtsam/linear/tests/testNoiseModel.cpp index 5e756a483..2518e4c49 100644 --- a/gtsam/linear/tests/testNoiseModel.cpp +++ b/gtsam/linear/tests/testNoiseModel.cpp @@ -807,50 +807,50 @@ TEST(NoiseModel, NonDiagonalGaussian) } } -TEST(NoiseModel, LogNormalizationConstant1D) { +TEST(NoiseModel, NegLogNormalizationConstant1D) { // Very simple 1D noise model, which we can compute by hand. double sigma = 0.1; - // For expected values, we compute log(1/sqrt(|2πΣ|)) by hand. - // = -0.5*(log(2π) + log(Σ)) (since it is 1D) - double expected_value = -0.5 * log(2 * M_PI * sigma * sigma); + // For expected values, we compute -log(1/sqrt(|2πΣ|)) by hand. 
+ // = 0.5*(log(2π) - log(Σ)) (since it is 1D) + double expected_value = 0.5 * log(2 * M_PI * sigma * sigma); // Gaussian { Matrix11 R; R << 1 / sigma; auto noise_model = Gaussian::SqrtInformation(R); - double actual_value = noise_model->logNormalizationConstant(); + double actual_value = noise_model->negLogConstant(); EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); } // Diagonal { auto noise_model = Diagonal::Sigmas(Vector1(sigma)); - double actual_value = noise_model->logNormalizationConstant(); + double actual_value = noise_model->negLogConstant(); EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); } // Isotropic { auto noise_model = Isotropic::Sigma(1, sigma); - double actual_value = noise_model->logNormalizationConstant(); + double actual_value = noise_model->negLogConstant(); EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); } // Unit { auto noise_model = Unit::Create(1); - double actual_value = noise_model->logNormalizationConstant(); + double actual_value = noise_model->negLogConstant(); double sigma = 1.0; - expected_value = -0.5 * log(2 * M_PI * sigma * sigma); + expected_value = 0.5 * log(2 * M_PI * sigma * sigma); EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); } } -TEST(NoiseModel, LogNormalizationConstant3D) { +TEST(NoiseModel, NegLogNormalizationConstant3D) { // Simple 3D noise model, which we can compute by hand. double sigma = 0.1; size_t n = 3; - // We compute the expected values just like in the LogNormalizationConstant1D + // We compute the expected values just like in the NegLogNormalizationConstant1D // test, but we multiply by 3 due to the determinant. 
- double expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma); + double expected_value = 0.5 * n * log(2 * M_PI * sigma * sigma); // Gaussian { @@ -859,27 +859,27 @@ TEST(NoiseModel, LogNormalizationConstant3D) { 0, 1 / sigma, 4, // 0, 0, 1 / sigma; auto noise_model = Gaussian::SqrtInformation(R); - double actual_value = noise_model->logNormalizationConstant(); + double actual_value = noise_model->negLogConstant(); EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); } // Diagonal { auto noise_model = Diagonal::Sigmas(Vector3(sigma, sigma, sigma)); - double actual_value = noise_model->logNormalizationConstant(); + double actual_value = noise_model->negLogConstant(); EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); } // Isotropic { auto noise_model = Isotropic::Sigma(n, sigma); - double actual_value = noise_model->logNormalizationConstant(); + double actual_value = noise_model->negLogConstant(); EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); } // Unit { auto noise_model = Unit::Create(3); - double actual_value = noise_model->logNormalizationConstant(); + double actual_value = noise_model->negLogConstant(); double sigma = 1.0; - expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma); + expected_value = 0.5 * n * log(2 * M_PI * sigma * sigma); EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); } } From 2c7e5eb65658ceb3c3ecef891817702ea8976c2d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 23 Sep 2024 15:07:33 -0400 Subject: [PATCH 307/398] fix docstring --- gtsam/hybrid/HybridGaussianConditional.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 8566d47ef..d2b8982a3 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -153,7 +153,7 @@ class GTSAM_EXPORT HybridGaussianConditional /** * @brief Return log normalization constant in negative log space. 
* - * The log normalization constant is the max of the individual + * The log normalization constant is the min of the individual * log-normalization constants. * * @return double From 5da7588b669bfc073b4df48e50aa984aff01915b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 23 Sep 2024 15:08:12 -0400 Subject: [PATCH 308/398] factor errors in negative log space --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 603b29fb5..4845a4cd5 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -233,8 +233,8 @@ continuousElimination(const HybridGaussianFactorGraph &factors, /* ************************************************************************ */ /** - * @brief Exponentiate log-values, not necessarily normalized, normalize, and - * return as AlgebraicDecisionTree. + * @brief Exponentiate (not necessarily normalized) log-values, normalize, and + * then return as AlgebraicDecisionTree. * * @param logValues DecisionTree of (unnormalized) log values. 
* @return AlgebraicDecisionTree @@ -242,9 +242,9 @@ continuousElimination(const HybridGaussianFactorGraph &factors, static AlgebraicDecisionTree probabilitiesFromLogValues( const AlgebraicDecisionTree &logValues) { // Perform normalization - double max_log = logValues.max(); + double min_log = logValues.min(); AlgebraicDecisionTree probabilities = DecisionTree( - logValues, [&max_log](const double x) { return exp(x - max_log); }); + logValues, [&min_log](const double x) { return exp(-(x - min_log)); }); probabilities = probabilities.normalize(probabilities.sum()); return probabilities; @@ -265,7 +265,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors, auto logProbability = [&](const GaussianFactor::shared_ptr &factor) -> double { if (!factor) return 0.0; - return -factor->error(VectorValues()); + return factor->error(VectorValues()); }; AlgebraicDecisionTree logProbabilities = DecisionTree(gmf->factors(), logProbability); @@ -327,11 +327,11 @@ static std::shared_ptr createDiscreteFactor( // If the factor is not null, it has no keys, just contains the residual. if (!factor) return 1.0; // TODO(dellaert): not loving this. - // Logspace version of: + // Negative logspace version of: // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); // negLogConstant gives `-log(k)` // which is `-log(k) = log(1/k) = log(\sqrt{|2πΣ|})`. 
- return -factor->error(kEmpty) + conditional->negLogConstant(); + return factor->error(kEmpty) - conditional->negLogConstant(); }; AlgebraicDecisionTree logProbabilities( From 31519f48b4983890f016f4e085887095d1d1c3ab Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 23 Sep 2024 15:09:37 -0400 Subject: [PATCH 309/398] rename to negLogProbability --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 4845a4cd5..85a2de464 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -321,7 +321,7 @@ using Result = std::pair, static std::shared_ptr createDiscreteFactor( const DecisionTree &eliminationResults, const DiscreteKeys &discreteSeparator) { - auto logProbability = [&](const Result &pair) -> double { + auto negLogProbability = [&](const Result &pair) -> double { const auto &[conditional, factor] = pair; static const VectorValues kEmpty; // If the factor is not null, it has no keys, just contains the residual. 
@@ -334,10 +334,10 @@ static std::shared_ptr createDiscreteFactor( return factor->error(kEmpty) - conditional->negLogConstant(); }; - AlgebraicDecisionTree logProbabilities( - DecisionTree(eliminationResults, logProbability)); + AlgebraicDecisionTree negLogProbabilities( + DecisionTree(eliminationResults, negLogProbability)); AlgebraicDecisionTree probabilities = - probabilitiesFromLogValues(logProbabilities); + probabilitiesFromLogValues(negLogProbabilities); return std::make_shared(discreteSeparator, probabilities); } From 92b829dd55654b708509e911fb84a1720b30195b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 23 Sep 2024 15:55:19 -0400 Subject: [PATCH 310/398] remove normalizationConstant() --- gtsam/inference/Conditional-inst.h | 10 ---------- gtsam/inference/Conditional.h | 3 --- 2 files changed, 13 deletions(-) diff --git a/gtsam/inference/Conditional-inst.h b/gtsam/inference/Conditional-inst.h index ca4d0981f..c21a75d26 100644 --- a/gtsam/inference/Conditional-inst.h +++ b/gtsam/inference/Conditional-inst.h @@ -63,12 +63,6 @@ double Conditional::negLogConstant() const { throw std::runtime_error("Conditional::negLogConstant is not implemented"); } -/* ************************************************************************* */ -template -double Conditional::normalizationConstant() const { - return std::exp(-negLogConstant()); -} - /* ************************************************************************* */ template template @@ -81,10 +75,6 @@ bool Conditional::CheckInvariants( const double logProb = conditional.logProbability(values); if (std::abs(prob_or_density - std::exp(logProb)) > 1e-9) return false; // logProb is not consistent with prob_or_density - if (std::abs(conditional.negLogConstant() - - (-std::log(conditional.normalizationConstant()))) > 1e-9) - return false; // log normalization constant is not consistent with - // normalization constant const double error = conditional.error(values); if (error < 0.0) return false; // 
prob_or_density is negative. const double expected = -(conditional.negLogConstant() + error); diff --git a/gtsam/inference/Conditional.h b/gtsam/inference/Conditional.h index f37a1b7a4..f9da36b7b 100644 --- a/gtsam/inference/Conditional.h +++ b/gtsam/inference/Conditional.h @@ -172,9 +172,6 @@ namespace gtsam { */ virtual double negLogConstant() const; - /** Non-virtual, negate and exponentiate negLogConstant. */ - double normalizationConstant() const; - /// @} /// @name Advanced Interface /// @{ From 9b3176e5ef8ed89f59978fa2aa06fdba682431be Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 23 Sep 2024 15:55:27 -0400 Subject: [PATCH 311/398] better naming --- gtsam/hybrid/HybridGaussianConditional.cpp | 15 +++++++-------- gtsam/hybrid/HybridGaussianConditional.h | 4 ++-- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 10 +++++----- 3 files changed, 14 insertions(+), 15 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index a7ed9e939..ef38237f2 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -51,14 +51,14 @@ HybridGaussianConditional::HybridGaussianConditional( discreteParents, GetFactorValuePairs(conditionals)), BaseConditional(continuousFrontals.size()), conditionals_(conditionals) { - // Calculate logConstant_ as the minimum of the negative-log normalizers of + // Calculate negLogConstant_ as the minimum of the negative-log normalizers of // the conditionals, by visiting the decision tree: - logConstant_ = std::numeric_limits::infinity(); + negLogConstant_ = std::numeric_limits::infinity(); conditionals_.visit( [this](const GaussianConditional::shared_ptr &conditional) { if (conditional) { - this->logConstant_ = - std::min(this->logConstant_, conditional->negLogConstant()); + this->negLogConstant_ = + std::min(this->negLogConstant_, conditional->negLogConstant()); } }); } @@ -84,7 +84,7 @@ GaussianFactorGraphTree 
HybridGaussianConditional::asGaussianFactorGraphTree() auto wrap = [this](const GaussianConditional::shared_ptr &gc) { // First check if conditional has not been pruned if (gc) { - const double Cgm_Kgcm = gc->negLogConstant() - this->logConstant_; + const double Cgm_Kgcm = gc->negLogConstant() - this->negLogConstant_; // If there is a difference in the covariances, we need to account for // that since the error is dependent on the mode. if (Cgm_Kgcm > 0.0) { @@ -155,8 +155,7 @@ void HybridGaussianConditional::print(const std::string &s, std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), "; } std::cout << std::endl - << " logNormalizationConstant: " << -negLogConstant() - << std::endl + << " logNormalizationConstant: " << -negLogConstant() << std::endl << std::endl; conditionals_.print( "", [&](Key k) { return formatter(k); }, @@ -214,7 +213,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( [&](const GaussianConditional::shared_ptr &conditional) -> GaussianFactorValuePair { const auto likelihood_m = conditional->likelihood(given); - const double Cgm_Kgcm = conditional->negLogConstant() - logConstant_; + const double Cgm_Kgcm = conditional->negLogConstant() - negLogConstant_; if (Cgm_Kgcm == 0.0) { return {likelihood_m, 0.0}; } else { diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index d2b8982a3..9c70cf6cb 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -66,7 +66,7 @@ class GTSAM_EXPORT HybridGaussianConditional Conditionals conditionals_; ///< a decision tree of Gaussian conditionals. ///< Negative-log of the normalization constant (log(\sqrt(|2πΣ|))). 
///< Take advantage of the neg-log space so everything is a minimization - double logConstant_; + double negLogConstant_; /** * @brief Convert a HybridGaussianConditional of conditionals into @@ -158,7 +158,7 @@ class GTSAM_EXPORT HybridGaussianConditional * * @return double */ - inline double negLogConstant() const override { return logConstant_; } + inline double negLogConstant() const override { return negLogConstant_; } /** * Create a likelihood factor for a hybrid Gaussian conditional, diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 85a2de464..82828bb41 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -233,13 +233,13 @@ continuousElimination(const HybridGaussianFactorGraph &factors, /* ************************************************************************ */ /** - * @brief Exponentiate (not necessarily normalized) log-values, normalize, and - * then return as AlgebraicDecisionTree. + * @brief Exponentiate (not necessarily normalized) negative log-values, + * normalize, and then return as AlgebraicDecisionTree. * * @param logValues DecisionTree of (unnormalized) log values. 
* @return AlgebraicDecisionTree */ -static AlgebraicDecisionTree probabilitiesFromLogValues( +static AlgebraicDecisionTree probabilitiesFromNegativeLogValues( const AlgebraicDecisionTree &logValues) { // Perform normalization double min_log = logValues.min(); @@ -271,7 +271,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors, DecisionTree(gmf->factors(), logProbability); AlgebraicDecisionTree probabilities = - probabilitiesFromLogValues(logProbabilities); + probabilitiesFromNegativeLogValues(logProbabilities); dfg.emplace_shared(gmf->discreteKeys(), probabilities); @@ -337,7 +337,7 @@ static std::shared_ptr createDiscreteFactor( AlgebraicDecisionTree negLogProbabilities( DecisionTree(eliminationResults, negLogProbability)); AlgebraicDecisionTree probabilities = - probabilitiesFromLogValues(negLogProbabilities); + probabilitiesFromNegativeLogValues(negLogProbabilities); return std::make_shared(discreteSeparator, probabilities); } From 6d3bc21a183d7feb67d7279a1948911831aa7adf Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 23 Sep 2024 16:05:23 -0400 Subject: [PATCH 312/398] non-loop versions for checking sigma values --- gtsam/linear/NoiseModel.cpp | 35 ++++++++++++++++++----------------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index de69fdce9..7cb93b17a 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -270,10 +270,7 @@ double Gaussian::logNormalizationConstant() const { /* ************************************************************************* */ // Diagonal /* ************************************************************************* */ -Diagonal::Diagonal() : - Gaussian(1) // TODO: Frank asks: really sure about this? 
-{ -} +Diagonal::Diagonal() : Gaussian() {} /* ************************************************************************* */ Diagonal::Diagonal(const Vector& sigmas) @@ -287,29 +284,32 @@ Diagonal::Diagonal(const Vector& sigmas) Diagonal::shared_ptr Diagonal::Variances(const Vector& variances, bool smart) { if (smart) { // check whether all the same entry - size_t n = variances.size(); - for (size_t j = 1; j < n; j++) - if (variances(j) != variances(0)) goto full; - return Isotropic::Variance(n, variances(0), true); + if ((variances.array() == variances(0)).all()) { + return Isotropic::Variance(variances.size(), variances(0), true); + } else { + return shared_ptr(new Diagonal(variances.cwiseSqrt())); + } } - full: return shared_ptr(new Diagonal(variances.cwiseSqrt())); + return shared_ptr(new Diagonal(variances.cwiseSqrt())); } /* ************************************************************************* */ Diagonal::shared_ptr Diagonal::Sigmas(const Vector& sigmas, bool smart) { if (smart) { size_t n = sigmas.size(); - if (n==0) goto full; + if (n == 0) return Diagonal::shared_ptr(new Diagonal(sigmas)); + // look for zeros to make a constraint - for (size_t j=0; j< n; ++j) - if (sigmas(j)<1e-8) - return Constrained::MixedSigmas(sigmas); + if ((sigmas.array() < 1e-8).any()) { + return Constrained::MixedSigmas(sigmas); + } + // check whether all the same entry - for (size_t j = 1; j < n; j++) - if (sigmas(j) != sigmas(0)) goto full; - return Isotropic::Sigma(n, sigmas(0), true); + if ((sigmas.array() == sigmas(0)).all()) { + return Isotropic::Sigma(n, sigmas(0), true); + } } - full: return Diagonal::shared_ptr(new Diagonal(sigmas)); + return Diagonal::shared_ptr(new Diagonal(sigmas)); } /* ************************************************************************* */ @@ -317,6 +317,7 @@ Diagonal::shared_ptr Diagonal::Precisions(const Vector& precisions, bool smart) { return Variances(precisions.array().inverse(), smart); } + /* 
************************************************************************* */ void Diagonal::print(const string& name) const { gtsam::print(sigmas_, name + "diagonal sigmas "); From 530b0ad7422de00e1ec3ac61f736acefd83137e7 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 23 Sep 2024 14:44:46 -0700 Subject: [PATCH 313/398] Made HNF constructors like HGF --- gtsam/hybrid/HybridNonlinearFactor.cpp | 53 ++++++++++++++- gtsam/hybrid/HybridNonlinearFactor.h | 90 ++++++++++++-------------- gtsam/hybrid/hybrid.i | 10 ++- 3 files changed, 98 insertions(+), 55 deletions(-) diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp index 9613233b1..a541722c4 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.cpp +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -21,10 +21,57 @@ namespace gtsam { /* *******************************************************************************/ -HybridNonlinearFactor::HybridNonlinearFactor(const KeyVector& keys, +static void checkKeys(const KeyVector& continuousKeys, + std::vector& pairs) { + KeySet factor_keys_set; + for (const auto& pair : pairs) { + auto f = pair.first; + // Insert all factor continuous keys in the continuous keys set. 
+ std::copy(f->keys().begin(), f->keys().end(), + std::inserter(factor_keys_set, factor_keys_set.end())); + } + + KeySet continuous_keys_set(continuousKeys.begin(), continuousKeys.end()); + if (continuous_keys_set != factor_keys_set) { + throw std::runtime_error( + "HybridNonlinearFactor: The specified continuous keys and the keys in " + "the factors do not match!"); + } +} + +/* *******************************************************************************/ +HybridNonlinearFactor::HybridNonlinearFactor( + const KeyVector& continuousKeys, const DiscreteKey& discreteKey, + const std::vector& factors) + : Base(continuousKeys, {discreteKey}) { + std::vector pairs; + for (auto&& f : factors) { + pairs.emplace_back(f, 0.0); + } + checkKeys(continuousKeys, pairs); + factors_ = FactorValuePairs({discreteKey}, pairs); +} + +/* *******************************************************************************/ +HybridNonlinearFactor::HybridNonlinearFactor( + const KeyVector& continuousKeys, const DiscreteKey& discreteKey, + const std::vector& factors) + : Base(continuousKeys, {discreteKey}) { + std::vector pairs; + KeySet continuous_keys_set(continuousKeys.begin(), continuousKeys.end()); + KeySet factor_keys_set; + for (auto&& [f, val] : factors) { + pairs.emplace_back(f, val); + } + checkKeys(continuousKeys, pairs); + factors_ = FactorValuePairs({discreteKey}, pairs); +} + +/* *******************************************************************************/ +HybridNonlinearFactor::HybridNonlinearFactor(const KeyVector& continuousKeys, const DiscreteKeys& discreteKeys, - const Factors& factors) - : Base(keys, discreteKeys), factors_(factors) {} + const FactorValuePairs& factors) + : Base(continuousKeys, discreteKeys), factors_(factors) {} /* *******************************************************************************/ AlgebraicDecisionTree HybridNonlinearFactor::errorTree( diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 
9852602de..766467518 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -68,11 +68,11 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor { * @brief typedef for DecisionTree which has Keys as node labels and * pairs of NonlinearFactor & an arbitrary scalar as leaf nodes. */ - using Factors = DecisionTree; + using FactorValuePairs = DecisionTree; private: - /// Decision tree of Gaussian factors indexed by discrete keys. - Factors factors_; + /// Decision tree of nonlinear factors indexed by discrete keys. + FactorValuePairs factors_; /// HybridFactor method implementation. Should not be used. AlgebraicDecisionTree errorTree( @@ -82,62 +82,49 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor { } public: + /// Default constructor, mainly for serialization. HybridNonlinearFactor() = default; /** - * @brief Construct from Decision tree. + * @brief Construct a new HybridNonlinearFactor on a single discrete key, + * providing the factors for each mode m as a vector of factors ϕ_m(x). + * The value ϕ(x,m) for the factor is simply ϕ_m(x). * - * @param keys Vector of keys for continuous factors. - * @param discreteKeys Vector of discrete keys. - * @param factors Decision tree with of shared factors. + * @param continuousKeys Vector of keys for continuous factors. + * @param discreteKey The discrete key for the "mode", indexing components. + * @param factors Vector of gaussian factors, one for each mode. */ - HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, - const Factors& factors); + HybridNonlinearFactor( + const KeyVector& continuousKeys, const DiscreteKey& discreteKey, + const std::vector& factors); /** - * @brief Convenience constructor that generates the underlying factor - * decision tree for us. + * @brief Construct a new HybridNonlinearFactor on a single discrete key, + * including a scalar error value for each mode m. 
The factors and scalars are + * provided as a vector of pairs (ϕ_m(x), E_m). + * The value ϕ(x,m) for the factor is now ϕ_m(x) + E_m. * - * Here it is important that the vector of factors has the correct number of - * elements based on the number of discrete keys and the cardinality of the - * keys, so that the decision tree is constructed appropriately. - * - * @tparam FACTOR The type of the factor shared pointers being passed in. - * Will be typecast to NonlinearFactor shared pointers. - * @param keys Vector of keys for continuous factors. - * @param discreteKey The discrete key indexing each component factor. - * @param factors Vector of nonlinear factor and scalar pairs. - * Same size as the cardinality of discreteKey. + * @param continuousKeys Vector of keys for continuous factors. + * @param discreteKey The discrete key for the "mode", indexing components. + * @param factors Vector of gaussian factor-scalar pairs, one per mode. */ - template - HybridNonlinearFactor( - const KeyVector& keys, const DiscreteKey& discreteKey, - const std::vector, double>>& factors) - : Base(keys, {discreteKey}) { - std::vector nonlinear_factors; - KeySet continuous_keys_set(keys.begin(), keys.end()); - KeySet factor_keys_set; - for (auto&& [f, val] : factors) { - // Insert all factor continuous keys in the continuous keys set. 
- std::copy(f->keys().begin(), f->keys().end(), - std::inserter(factor_keys_set, factor_keys_set.end())); - - if (auto nf = std::dynamic_pointer_cast(f)) { - nonlinear_factors.emplace_back(nf, val); - } else { - throw std::runtime_error( - "Factors passed into HybridNonlinearFactor need to be nonlinear!"); - } - } - factors_ = Factors({discreteKey}, nonlinear_factors); - - if (continuous_keys_set != factor_keys_set) { - throw std::runtime_error( - "The specified continuous keys and the keys in the factors don't " - "match!"); - } - } + HybridNonlinearFactor(const KeyVector& continuousKeys, + const DiscreteKey& discreteKey, + const std::vector& factors); + /** + * @brief Construct a new HybridNonlinearFactor on a several discrete keys M, + * including a scalar error value for each assignment m. The factors and + * scalars are provided as a DecisionTree of pairs (ϕ_M(x), E_M). + * The value ϕ(x,M) for the factor is again ϕ_m(x) + E_m. + * + * @param continuousKeys A vector of keys representing continuous variables. + * @param discreteKeys Discrete variables and their cardinalities. + * @param factors The decision tree of nonlinear factor/scalar pairs. + */ + HybridNonlinearFactor(const KeyVector& continuousKeys, + const DiscreteKeys& discreteKeys, + const FactorValuePairs& factors); /** * @brief Compute error of the HybridNonlinearFactor as a tree. 
* @@ -196,4 +183,9 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor { const Values& continuousValues) const; }; +// traits +template <> +struct traits : public Testable { +}; + } // namespace gtsam diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 82881ac47..d1b8fbf6d 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -246,14 +246,18 @@ class HybridNonlinearFactorGraph { #include class HybridNonlinearFactor : gtsam::HybridFactor { HybridNonlinearFactor( - const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, - const gtsam::DecisionTree< - gtsam::Key, std::pair>& factors); + const gtsam::KeyVector& keys, const gtsam::DiscreteKey& discreteKey, + const std::vector& factors); HybridNonlinearFactor( const gtsam::KeyVector& keys, const gtsam::DiscreteKey& discreteKey, const std::vector>& factors); + HybridNonlinearFactor( + const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, + const gtsam::DecisionTree< + gtsam::Key, std::pair>& factors); + double error(const gtsam::Values& continuousValues, const gtsam::DiscreteValues& discreteValues) const; From 7c07d802a380cfcc7fb99c09b839c13004f2d31f Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 23 Sep 2024 14:48:38 -0700 Subject: [PATCH 314/398] Clean up some Gaussian stragglers --- gtsam/hybrid/tests/testHybridGaussianFactor.cpp | 2 +- gtsam/hybrid/tests/testSerializationHybrid.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 4b664b8b4..fff8e48fb 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -73,7 +73,7 @@ TEST(HybridGaussianFactor, ConstructorVariants) { HybridGaussianFactor fromFactors({X(1), X(2)}, m1, {f10, f11}); std::vector pairs{{f10, 0.0}, {f11, 0.0}}; - HybridGaussianFactor fromPairs({X(1), X(2)}, {m1}, pairs); + HybridGaussianFactor 
fromPairs({X(1), X(2)}, m1, pairs); assert_equal(fromFactors, fromPairs); HybridGaussianFactor::FactorValuePairs decisionTree({m1}, pairs); diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 701b870f1..4bf90019b 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -83,7 +83,7 @@ TEST(HybridSerialization, HybridGaussianFactor) { auto b1 = Matrix::Ones(2, 1); auto f0 = std::make_shared(X(0), A, b0); auto f1 = std::make_shared(X(0), A, b1); - std::vector factors{{f0, 0.0}, {f1, 0.0}}; + std::vector factors{f0, f1}; const HybridGaussianFactor factor(continuousKeys, discreteKey, factors); From 5aa5222edb07b0cda8647a0e36196e281ba80240 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 23 Sep 2024 14:49:10 -0700 Subject: [PATCH 315/398] Apply easy constructor --- gtsam/hybrid/tests/Switching.h | 11 +--- gtsam/hybrid/tests/testHybridBayesNet.cpp | 3 +- gtsam/hybrid/tests/testHybridEstimation.cpp | 14 ++--- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 31 +++++----- .../tests/testHybridNonlinearFactor.cpp | 47 +++++++++------ .../tests/testHybridNonlinearFactorGraph.cpp | 60 +++++++++---------- .../hybrid/tests/testHybridNonlinearISAM.cpp | 7 +-- 7 files changed, 85 insertions(+), 88 deletions(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 2de8e7fc0..8836d04c8 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -161,13 +161,8 @@ struct Switching { for (size_t k = 0; k < K - 1; k++) { KeyVector keys = {X(k), X(k + 1)}; auto motion_models = motionModels(k, between_sigma); - std::vector components; - for (auto &&f : motion_models) { - components.push_back( - {std::dynamic_pointer_cast(f), 0.0}); - } nonlinearFactorGraph.emplace_shared(keys, modes[k], - components); + motion_models); } // Add measurement factors @@ -191,8 +186,8 @@ struct Switching { } // Create motion models 
for a given time step - static std::vector motionModels(size_t k, - double sigma = 1.0) { + static std::vector motionModels( + size_t k, double sigma = 1.0) { auto noise_model = noiseModel::Isotropic::Sigma(1, sigma); auto still = std::make_shared(X(k), X(k + 1), 0.0, noise_model), diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 42ccf1600..6e23db91e 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -391,8 +391,7 @@ TEST(HybridBayesNet, Sampling) { std::make_shared>(X(0), X(1), 1, noise_model); nfg.emplace_shared( KeyVector{X(0), X(1)}, DiscreteKey(M(0), 2), - std::vector{{zero_motion, 0.0}, - {one_motion, 0.0}}); + std::vector{zero_motion, one_motion}); DiscreteKey mode(M(0), 2); nfg.emplace_shared(mode, "1/1"); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 6f28f639d..4899205df 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -435,8 +435,8 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() { std::make_shared>(X(0), X(1), 0, noise_model); const auto one_motion = std::make_shared>(X(0), X(1), 1, noise_model); - std::vector components = {{zero_motion, 0.0}, - {one_motion, 0.0}}; + std::vector components = {zero_motion, + one_motion}; nfg.emplace_shared(KeyVector{X(0), X(1)}, m, components); @@ -566,10 +566,8 @@ std::shared_ptr mixedVarianceFactor( [](const GaussianFactor::shared_ptr& gf) -> GaussianFactorValuePair { return {gf, 0.0}; }); - auto updated_gmf = std::make_shared( + return std::make_shared( gmf->continuousKeys(), gmf->discreteKeys(), updated_pairs); - - return updated_gmf; } /****************************************************************************/ @@ -591,8 +589,7 @@ TEST(HybridEstimation, ModeSelection) { X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_loose)), model1 = std::make_shared( 
X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight)); - std::vector components = {{model0, 0.0}, - {model1, 0.0}}; + std::vector components = {model0, model1}; KeyVector keys = {X(0), X(1)}; DiscreteKey modes(M(0), 2); @@ -688,8 +685,7 @@ TEST(HybridEstimation, ModeSelection2) { X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_loose)), model1 = std::make_shared>( X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight)); - std::vector components = {{model0, 0.0}, - {model1, 0.0}}; + std::vector components = {model0, model1}; KeyVector keys = {X(0), X(1)}; DiscreteKey modes(M(0), 2); diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index 72ab43a2a..9ce109647 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -416,12 +416,11 @@ TEST(HybridGaussianISAM, NonTrivial) { Pose2 odometry(1.0, 0.0, 0.0); KeyVector contKeys = {W(0), W(1)}; auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); - auto still = std::make_shared(W(0), W(1), Pose2(0, 0, 0), - noise_model), - moving = std::make_shared(W(0), W(1), odometry, - noise_model); - std::vector> components = { - {moving, 0.0}, {still, 0.0}}; + std::vector components; + components.emplace_back( + new PlanarMotionModel(W(0), W(1), odometry, noise_model)); // moving + components.emplace_back( + new PlanarMotionModel(W(0), W(1), Pose2(0, 0, 0), noise_model)); // still fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(1), 2), components); @@ -456,11 +455,11 @@ TEST(HybridGaussianISAM, NonTrivial) { /*************** Run Round 3 ***************/ // Add odometry factor with discrete modes. 
contKeys = {W(1), W(2)}; - still = std::make_shared(W(1), W(2), Pose2(0, 0, 0), - noise_model); - moving = - std::make_shared(W(1), W(2), odometry, noise_model); - components = {{moving, 0.0}, {still, 0.0}}; + components.clear(); + components.emplace_back( + new PlanarMotionModel(W(1), W(2), odometry, noise_model)); // moving + components.emplace_back( + new PlanarMotionModel(W(1), W(2), Pose2(0, 0, 0), noise_model)); // still fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(2), 2), components); @@ -498,11 +497,11 @@ TEST(HybridGaussianISAM, NonTrivial) { /*************** Run Round 4 ***************/ // Add odometry factor with discrete modes. contKeys = {W(2), W(3)}; - still = std::make_shared(W(2), W(3), Pose2(0, 0, 0), - noise_model); - moving = - std::make_shared(W(2), W(3), odometry, noise_model); - components = {{moving, 0.0}, {still, 0.0}}; + components.clear(); + components.emplace_back( + new PlanarMotionModel(W(2), W(3), odometry, noise_model)); // moving + components.emplace_back( + new PlanarMotionModel(W(2), W(3), Pose2(0, 0, 0), noise_model)); // still fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(3), 2), components); diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp index 92efa680a..d949a1e06 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp @@ -43,24 +43,39 @@ TEST(HybridNonlinearFactor, Constructor) { HybridNonlinearFactor::iterator it = factor.begin(); CHECK(it == factor.end()); } +/* ************************************************************************* */ +namespace test_constructor { +DiscreteKey m1(1, 2); +double between0 = 0.0; +double between1 = 1.0; + +Vector1 sigmas = Vector1(1.0); +auto model = noiseModel::Diagonal::Sigmas(sigmas, false); + +auto f0 = std::make_shared>(X(1), X(2), between0, model); +auto f1 = std::make_shared>(X(1), X(2), between1, model); +} // namespace test_constructor + +/* 
************************************************************************* */ +// Test simple to complex constructors... +TEST(HybridGaussianFactor, ConstructorVariants) { + using namespace test_constructor; + HybridNonlinearFactor fromFactors({X(1), X(2)}, m1, {f0, f1}); + + std::vector pairs{{f0, 0.0}, {f1, 0.0}}; + HybridNonlinearFactor fromPairs({X(1), X(2)}, m1, pairs); + assert_equal(fromFactors, fromPairs); + + HybridNonlinearFactor::FactorValuePairs decisionTree({m1}, pairs); + HybridNonlinearFactor fromDecisionTree({X(1), X(2)}, {m1}, decisionTree); + assert_equal(fromDecisionTree, fromPairs); +} /* ************************************************************************* */ // Test .print() output. TEST(HybridNonlinearFactor, Printing) { - DiscreteKey m1(1, 2); - double between0 = 0.0; - double between1 = 1.0; - - Vector1 sigmas = Vector1(1.0); - auto model = noiseModel::Diagonal::Sigmas(sigmas, false); - - auto f0 = - std::make_shared>(X(1), X(2), between0, model); - auto f1 = - std::make_shared>(X(1), X(2), between1, model); - std::vector factors{{f0, 0.0}, {f1, 0.0}}; - - HybridNonlinearFactor hybridFactor({X(1), X(2)}, {m1}, factors); + using namespace test_constructor; + HybridNonlinearFactor hybridFactor({X(1), X(2)}, {m1}, {f0, f1}); std::string expected = R"(Hybrid [x1 x2; 1] @@ -86,9 +101,7 @@ static HybridNonlinearFactor getHybridNonlinearFactor() { std::make_shared>(X(1), X(2), between0, model); auto f1 = std::make_shared>(X(1), X(2), between1, model); - std::vector factors{{f0, 0.0}, {f1, 0.0}}; - - return HybridNonlinearFactor({X(1), X(2)}, {m1}, factors); + return HybridNonlinearFactor({X(1), X(2)}, m1, {f0, f1}); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 495444c79..63a41a63a 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ 
b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -115,35 +115,40 @@ TEST(HybridNonlinearFactorGraph, Resize) { EXPECT_LONGS_EQUAL(fg.size(), 0); } +/***************************************************************************/ +namespace test_motion { +KeyVector contKeys = {X(0), X(1)}; +gtsam::DiscreteKey m1(M(1), 2); +auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0); +std::vector components = { + std::make_shared(X(0), X(1), 0.0, noise_model), + std::make_shared(X(0), X(1), 1.0, noise_model)}; +} // namespace test_motion + /*************************************************************************** * Test that the resize method works correctly for a * HybridGaussianFactorGraph. */ TEST(HybridGaussianFactorGraph, Resize) { - HybridNonlinearFactorGraph nhfg; + using namespace test_motion; + + HybridNonlinearFactorGraph hnfg; auto nonlinearFactor = std::make_shared>( X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1)); - nhfg.push_back(nonlinearFactor); + hnfg.push_back(nonlinearFactor); auto discreteFactor = std::make_shared(); - nhfg.push_back(discreteFactor); + hnfg.push_back(discreteFactor); - KeyVector contKeys = {X(0), X(1)}; - auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0); - auto still = std::make_shared(X(0), X(1), 0.0, noise_model), - moving = std::make_shared(X(0), X(1), 1.0, noise_model); - - std::vector> components = { - {still, 0.0}, {moving, 0.0}}; - auto dcFactor = std::make_shared( - contKeys, gtsam::DiscreteKey(M(1), 2), components); - nhfg.push_back(dcFactor); + auto dcFactor = + std::make_shared(contKeys, m1, components); + hnfg.push_back(dcFactor); Values linearizationPoint; linearizationPoint.insert(X(0), 0); linearizationPoint.insert(X(1), 1); // Generate `HybridGaussianFactorGraph` by linearizing - HybridGaussianFactorGraph gfg = *nhfg.linearize(linearizationPoint); + HybridGaussianFactorGraph gfg = *hnfg.linearize(linearizationPoint); EXPECT_LONGS_EQUAL(gfg.size(), 3); @@ -156,26 +161,19 @@ TEST(HybridGaussianFactorGraph, 
Resize) { * continuous keys provided do not match the keys in the factors. */ TEST(HybridGaussianFactorGraph, HybridNonlinearFactor) { + using namespace test_motion; + auto nonlinearFactor = std::make_shared>( X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1)); auto discreteFactor = std::make_shared(); - auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0); - auto still = std::make_shared(X(0), X(1), 0.0, noise_model), - moving = std::make_shared(X(0), X(1), 1.0, noise_model); - - std::vector> components = { - {still, 0.0}, {moving, 0.0}}; - // Check for exception when number of continuous keys are under-specified. - KeyVector contKeys = {X(0)}; - THROWS_EXCEPTION(std::make_shared( - contKeys, gtsam::DiscreteKey(M(1), 2), components)); + THROWS_EXCEPTION( + std::make_shared(KeyVector{X(0)}, m1, components)); // Check for exception when number of continuous keys are too many. - contKeys = {X(0), X(1), X(2)}; THROWS_EXCEPTION(std::make_shared( - contKeys, gtsam::DiscreteKey(M(1), 2), components)); + KeyVector{X(0), X(1), X(2)}, m1, components)); } /***************************************************************************** @@ -832,12 +830,10 @@ TEST(HybridNonlinearFactorGraph, DefaultDecisionTree) { Pose2 odometry(2.0, 0.0, 0.0); KeyVector contKeys = {X(0), X(1)}; auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); - auto still = std::make_shared(X(0), X(1), Pose2(0, 0, 0), - noise_model), - moving = std::make_shared(X(0), X(1), odometry, - noise_model); - std::vector> motion_models = - {{still, 0.0}, {moving, 0.0}}; + std::vector motion_models = { + std::make_shared(X(0), X(1), Pose2(0, 0, 0), + noise_model), + std::make_shared(X(0), X(1), odometry, noise_model)}; fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(1), 2), motion_models); diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index 86e045bb0..fe81fc13e 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ 
b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -439,8 +439,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { noise_model), moving = std::make_shared(W(0), W(1), odometry, noise_model); - std::vector> components = { - {moving, 0.0}, {still, 0.0}}; + std::vector components{moving, still}; fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(1), 2), components); @@ -479,7 +478,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { noise_model); moving = std::make_shared(W(1), W(2), odometry, noise_model); - components = {{moving, 0.0}, {still, 0.0}}; + components = {moving, still}; fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(2), 2), components); @@ -521,7 +520,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { noise_model); moving = std::make_shared(W(2), W(3), odometry, noise_model); - components = {{moving, 0.0}, {still, 0.0}}; + components = {moving, still}; fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(3), 2), components); From 4c7d3b5a50f4d0144b2a2bb17b63359a87f1abe5 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 23 Sep 2024 17:46:11 -0700 Subject: [PATCH 316/398] Get rid of unreliable timing --- .../tests/testAlgebraicDecisionTree.cpp | 87 ++----------------- 1 file changed, 7 insertions(+), 80 deletions(-) diff --git a/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp b/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp index d65a9c82b..f1b661e6f 100644 --- a/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp +++ b/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp @@ -20,12 +20,9 @@ #include // make sure we have traits #include // headers first to make sure no missing headers +#include #include #include // for convert only -#define DISABLE_TIMING - -#include -#include #include using namespace std; @@ -71,16 +68,14 @@ void dot(const T& f, const string& filename) { // instrumented operators /* ************************************************************************** */ size_t muls = 0, adds = 0; -double elapsed; void resetCounts() { muls = 0; adds = 0; } void 
printCounts(const string& s) { #ifndef DISABLE_TIMING -cout << s << ": " << std::setw(3) << muls << " muls, " << - std::setw(3) << adds << " adds, " << 1000 * elapsed << " ms." - << endl; + cout << s << ": " << std::setw(3) << muls << " muls, " << std::setw(3) << adds + << " adds" << endl; #endif resetCounts(); } @@ -131,7 +126,8 @@ ADT create(const Signature& signature) { static size_t count = 0; const DiscreteKey& key = signature.key(); std::stringstream ss; - ss << "CPT-" << std::setw(3) << std::setfill('0') << ++count << "-" << key.first; + ss << "CPT-" << std::setw(3) << std::setfill('0') << ++count << "-" + << key.first; string DOTfile = ss.str(); dot(p, DOTfile); return p; @@ -143,8 +139,6 @@ TEST(ADT, joint) { DiscreteKey A(0, 2), S(1, 2), T(2, 2), L(3, 2), B(4, 2), E(5, 2), X(6, 2), D(7, 2); - resetCounts(); - gttic_(asiaCPTs); ADT pA = create(A % "99/1"); ADT pS = create(S % "50/50"); ADT pT = create(T | A = "99/1 95/5"); @@ -153,15 +147,9 @@ TEST(ADT, joint) { ADT pE = create((E | T, L) = "F T T T"); ADT pX = create(X | E = "95/5 2/98"); ADT pD = create((D | E, B) = "9/1 2/8 3/7 1/9"); - gttoc_(asiaCPTs); - tictoc_getNode(asiaCPTsNode, asiaCPTs); - elapsed = asiaCPTsNode->secs() + asiaCPTsNode->wall(); - tictoc_reset_(); - printCounts("Asia CPTs"); // Create joint resetCounts(); - gttic_(asiaJoint); ADT joint = pA; dot(joint, "Asia-A"); joint = apply(joint, pS, &mul); @@ -183,10 +171,6 @@ TEST(ADT, joint) { #else EXPECT_LONGS_EQUAL(508, muls); #endif - gttoc_(asiaJoint); - tictoc_getNode(asiaJointNode, asiaJoint); - elapsed = asiaJointNode->secs() + asiaJointNode->wall(); - tictoc_reset_(); printCounts("Asia joint"); // Form P(A,S,T,L) = P(A) P(S) P(T|A) P(L|S) @@ -208,8 +192,6 @@ TEST(ADT, inference) { DiscreteKey A(0, 2), D(1, 2), // B(2, 2), L(3, 2), E(4, 2), S(5, 2), T(6, 2), X(7, 2); - resetCounts(); - gttic_(infCPTs); ADT pA = create(A % "99/1"); ADT pS = create(S % "50/50"); ADT pT = create(T | A = "99/1 95/5"); @@ -218,15 +200,9 @@ TEST(ADT, 
inference) { ADT pE = create((E | T, L) = "F T T T"); ADT pX = create(X | E = "95/5 2/98"); ADT pD = create((D | E, B) = "9/1 2/8 3/7 1/9"); - gttoc_(infCPTs); - tictoc_getNode(infCPTsNode, infCPTs); - elapsed = infCPTsNode->secs() + infCPTsNode->wall(); - tictoc_reset_(); - // printCounts("Inference CPTs"); - // Create joint + // Create joint, note different ordering than above: different tree! resetCounts(); - gttic_(asiaProd); ADT joint = pA; dot(joint, "Joint-Product-A"); joint = apply(joint, pS, &mul); @@ -248,14 +224,9 @@ TEST(ADT, inference) { #else EXPECT_LONGS_EQUAL(508, (long)muls); // different ordering #endif - gttoc_(asiaProd); - tictoc_getNode(asiaProdNode, asiaProd); - elapsed = asiaProdNode->secs() + asiaProdNode->wall(); - tictoc_reset_(); printCounts("Asia product"); resetCounts(); - gttic_(asiaSum); ADT marginal = joint; marginal = marginal.combine(X, &add_); dot(marginal, "Joint-Sum-ADBLEST"); @@ -270,10 +241,6 @@ TEST(ADT, inference) { #else EXPECT_LONGS_EQUAL(240, (long)adds); #endif - gttoc_(asiaSum); - tictoc_getNode(asiaSumNode, asiaSum); - elapsed = asiaSumNode->secs() + asiaSumNode->wall(); - tictoc_reset_(); printCounts("Asia sum"); } @@ -282,7 +249,6 @@ TEST(ADT, factor_graph) { DiscreteKey B(0, 2), L(1, 2), E(2, 2), S(3, 2), T(4, 2), X(5, 2); resetCounts(); - gttic_(createCPTs); ADT pS = create(S % "50/50"); ADT pT = create(T % "95/5"); ADT pL = create(L | S = "99/1 90/10"); @@ -290,15 +256,10 @@ TEST(ADT, factor_graph) { ADT pX = create(X | E = "95/5 2/98"); ADT pD = create(B | E = "1/8 7/9"); ADT pB = create(B | S = "70/30 40/60"); - gttoc_(createCPTs); - tictoc_getNode(createCPTsNode, createCPTs); - elapsed = createCPTsNode->secs() + createCPTsNode->wall(); - tictoc_reset_(); - // printCounts("Create CPTs"); + printCounts("Create CPTs"); // Create joint resetCounts(); - gttic_(asiaFG); ADT fg = pS; fg = apply(fg, pT, &mul); fg = apply(fg, pL, &mul); @@ -312,14 +273,9 @@ TEST(ADT, factor_graph) { #else EXPECT_LONGS_EQUAL(188, 
(long)muls); #endif - gttoc_(asiaFG); - tictoc_getNode(asiaFGNode, asiaFG); - elapsed = asiaFGNode->secs() + asiaFGNode->wall(); - tictoc_reset_(); printCounts("Asia FG"); resetCounts(); - gttic_(marg); fg = fg.combine(X, &add_); dot(fg, "Marginalized-6X"); fg = fg.combine(T, &add_); @@ -335,83 +291,54 @@ TEST(ADT, factor_graph) { #else LONGS_EQUAL(62, adds); #endif - gttoc_(marg); - tictoc_getNode(margNode, marg); - elapsed = margNode->secs() + margNode->wall(); - tictoc_reset_(); printCounts("marginalize"); // BLESTX // Eliminate X resetCounts(); - gttic_(elimX); ADT fE = pX; dot(fE, "Eliminate-01-fEX"); fE = fE.combine(X, &add_); dot(fE, "Eliminate-02-fE"); - gttoc_(elimX); - tictoc_getNode(elimXNode, elimX); - elapsed = elimXNode->secs() + elimXNode->wall(); - tictoc_reset_(); printCounts("Eliminate X"); // Eliminate T resetCounts(); - gttic_(elimT); ADT fLE = pT; fLE = apply(fLE, pE, &mul); dot(fLE, "Eliminate-03-fLET"); fLE = fLE.combine(T, &add_); dot(fLE, "Eliminate-04-fLE"); - gttoc_(elimT); - tictoc_getNode(elimTNode, elimT); - elapsed = elimTNode->secs() + elimTNode->wall(); - tictoc_reset_(); printCounts("Eliminate T"); // Eliminate S resetCounts(); - gttic_(elimS); ADT fBL = pS; fBL = apply(fBL, pL, &mul); fBL = apply(fBL, pB, &mul); dot(fBL, "Eliminate-05-fBLS"); fBL = fBL.combine(S, &add_); dot(fBL, "Eliminate-06-fBL"); - gttoc_(elimS); - tictoc_getNode(elimSNode, elimS); - elapsed = elimSNode->secs() + elimSNode->wall(); - tictoc_reset_(); printCounts("Eliminate S"); // Eliminate E resetCounts(); - gttic_(elimE); ADT fBL2 = fE; fBL2 = apply(fBL2, fLE, &mul); fBL2 = apply(fBL2, pD, &mul); dot(fBL2, "Eliminate-07-fBLE"); fBL2 = fBL2.combine(E, &add_); dot(fBL2, "Eliminate-08-fBL2"); - gttoc_(elimE); - tictoc_getNode(elimENode, elimE); - elapsed = elimENode->secs() + elimENode->wall(); - tictoc_reset_(); printCounts("Eliminate E"); // Eliminate L resetCounts(); - gttic_(elimL); ADT fB = fBL; fB = apply(fB, fBL2, &mul); dot(fB, "Eliminate-09-fBL"); fB = 
fB.combine(L, &add_); dot(fB, "Eliminate-10-fB"); - gttoc_(elimL); - tictoc_getNode(elimLNode, elimL); - elapsed = elimLNode->secs() + elimLNode->wall(); - tictoc_reset_(); printCounts("Eliminate L"); } From 502101100e733bb4218cab8e53ca07d6a4812f8b Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 23 Sep 2024 17:53:43 -0700 Subject: [PATCH 317/398] Split test --- .../tests/testAlgebraicDecisionTree.cpp | 36 +++++++++++-------- 1 file changed, 22 insertions(+), 14 deletions(-) diff --git a/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp b/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp index f1b661e6f..ffb1f0b5a 100644 --- a/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp +++ b/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp @@ -133,20 +133,25 @@ ADT create(const Signature& signature) { return p; } +/* ************************************************************************* */ +namespace asiaCPTs { +DiscreteKey A(0, 2), S(1, 2), T(2, 2), L(3, 2), B(4, 2), E(5, 2), X(6, 2), + D(7, 2); + +ADT pA = create(A % "99/1"); +ADT pS = create(S % "50/50"); +ADT pT = create(T | A = "99/1 95/5"); +ADT pL = create(L | S = "99/1 90/10"); +ADT pB = create(B | S = "70/30 40/60"); +ADT pE = create((E | T, L) = "F T T T"); +ADT pX = create(X | E = "95/5 2/98"); +ADT pD = create((D | E, B) = "9/1 2/8 3/7 1/9"); +} // namespace asiaCPTs + /* ************************************************************************* */ // test Asia Joint TEST(ADT, joint) { - DiscreteKey A(0, 2), S(1, 2), T(2, 2), L(3, 2), B(4, 2), E(5, 2), X(6, 2), - D(7, 2); - - ADT pA = create(A % "99/1"); - ADT pS = create(S % "50/50"); - ADT pT = create(T | A = "99/1 95/5"); - ADT pL = create(L | S = "99/1 90/10"); - ADT pB = create(B | S = "70/30 40/60"); - ADT pE = create((E | T, L) = "F T T T"); - ADT pX = create(X | E = "95/5 2/98"); - ADT pD = create((D | E, B) = "9/1 2/8 3/7 1/9"); + using namespace asiaCPTs; // Create joint resetCounts(); @@ -172,6 +177,11 @@ TEST(ADT, joint) { 
EXPECT_LONGS_EQUAL(508, muls); #endif printCounts("Asia joint"); +} + +/* ************************************************************************* */ +TEST(ADT, combine) { + using namespace asiaCPTs; // Form P(A,S,T,L) = P(A) P(S) P(T|A) P(L|S) ADT pASTL = pA; @@ -187,7 +197,7 @@ TEST(ADT, joint) { } /* ************************************************************************* */ -// test Inference with joint +// test Inference with joint, created using different ordering TEST(ADT, inference) { DiscreteKey A(0, 2), D(1, 2), // B(2, 2), L(3, 2), E(4, 2), S(5, 2), T(6, 2), X(7, 2); @@ -248,7 +258,6 @@ TEST(ADT, inference) { TEST(ADT, factor_graph) { DiscreteKey B(0, 2), L(1, 2), E(2, 2), S(3, 2), T(4, 2), X(5, 2); - resetCounts(); ADT pS = create(S % "50/50"); ADT pT = create(T % "95/5"); ADT pL = create(L | S = "99/1 90/10"); @@ -256,7 +265,6 @@ TEST(ADT, factor_graph) { ADT pX = create(X | E = "95/5 2/98"); ADT pD = create(B | E = "1/8 7/9"); ADT pB = create(B | S = "70/30 40/60"); - printCounts("Create CPTs"); // Create joint resetCounts(); From 2757ca4fbee5026dfea5359e7a56a6958e1ae327 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 23 Sep 2024 17:55:01 -0700 Subject: [PATCH 318/398] Make sure label is formatted for Choice --- gtsam/discrete/DecisionTree-inl.h | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index 06161c2e1..27e98fcde 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -91,7 +91,7 @@ namespace gtsam { void dot(std::ostream& os, const LabelFormatter& labelFormatter, const ValueFormatter& valueFormatter, bool showZero) const override { - std::string value = valueFormatter(constant_); + const std::string value = valueFormatter(constant_); if (showZero || value.compare("0")) os << "\"" << this->id() << "\" [label=\"" << value << "\", shape=box, rank=sink, height=0.35, fixedsize=true]\n"; @@ -306,7 +306,8 @@ 
namespace gtsam { void dot(std::ostream& os, const LabelFormatter& labelFormatter, const ValueFormatter& valueFormatter, bool showZero) const override { - os << "\"" << this->id() << "\" [shape=circle, label=\"" << label_ + const std::string label = labelFormatter(label_); + os << "\"" << this->id() << "\" [shape=circle, label=\"" << label << "\"]\n"; size_t B = branches_.size(); for (size_t i = 0; i < B; i++) { From 52acceebc93d18ff7bdc9bf018454f86aff6f280 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 23 Sep 2024 18:35:19 -0700 Subject: [PATCH 319/398] Some formatting --- .../discrete/tests/testDecisionTreeFactor.cpp | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/gtsam/discrete/tests/testDecisionTreeFactor.cpp b/gtsam/discrete/tests/testDecisionTreeFactor.cpp index d764da7bf..e6501c3f7 100644 --- a/gtsam/discrete/tests/testDecisionTreeFactor.cpp +++ b/gtsam/discrete/tests/testDecisionTreeFactor.cpp @@ -22,7 +22,10 @@ #include #include #include +#include #include +#include +#include using namespace std; using namespace gtsam; @@ -33,25 +36,24 @@ TEST(DecisionTreeFactor, ConstructorsMatch) { DiscreteKey X(0, 2), Y(1, 3); // Create with vector and with string - const std::vector table {2, 5, 3, 6, 4, 7}; + const std::vector table{2, 5, 3, 6, 4, 7}; DecisionTreeFactor f1({X, Y}, table); DecisionTreeFactor f2({X, Y}, "2 5 3 6 4 7"); EXPECT(assert_equal(f1, f2)); } /* ************************************************************************* */ -TEST( DecisionTreeFactor, constructors) -{ +TEST(DecisionTreeFactor, constructors) { // Declare a bunch of keys - DiscreteKey X(0,2), Y(1,3), Z(2,2); + DiscreteKey X(0, 2), Y(1, 3), Z(2, 2); // Create factors DecisionTreeFactor f1(X, {2, 8}); DecisionTreeFactor f2(X & Y, "2 5 3 6 4 7"); DecisionTreeFactor f3(X & Y & Z, "2 5 3 6 4 7 25 55 35 65 45 75"); - EXPECT_LONGS_EQUAL(1,f1.size()); - EXPECT_LONGS_EQUAL(2,f2.size()); - EXPECT_LONGS_EQUAL(3,f3.size()); + EXPECT_LONGS_EQUAL(1, 
f1.size()); + EXPECT_LONGS_EQUAL(2, f2.size()); + EXPECT_LONGS_EQUAL(3, f3.size()); DiscreteValues x121{{0, 1}, {1, 2}, {2, 1}}; EXPECT_DOUBLES_EQUAL(8, f1(x121), 1e-9); @@ -70,7 +72,7 @@ TEST( DecisionTreeFactor, constructors) /* ************************************************************************* */ TEST(DecisionTreeFactor, Error) { // Declare a bunch of keys - DiscreteKey X(0,2), Y(1,3), Z(2,2); + DiscreteKey X(0, 2), Y(1, 3), Z(2, 2); // Create factors DecisionTreeFactor f(X & Y & Z, "2 5 3 6 4 7 25 55 35 65 45 75"); @@ -104,9 +106,8 @@ TEST(DecisionTreeFactor, multiplication) { } /* ************************************************************************* */ -TEST( DecisionTreeFactor, sum_max) -{ - DiscreteKey v0(0,3), v1(1,2); +TEST(DecisionTreeFactor, sum_max) { + DiscreteKey v0(0, 3), v1(1, 2); DecisionTreeFactor f1(v0 & v1, "1 2 3 4 5 6"); DecisionTreeFactor expected(v1, "9 12"); @@ -165,10 +166,9 @@ TEST(DecisionTreeFactor, Prune) { "0.0 0.0 0.0 0.60658897 0.61241912 0.61241969 0.61247685 0.61247742 0.0 " "0.0 0.0 0.99995287 1.0 1.0 1.0 1.0"); - DecisionTreeFactor expected3( - D & C & B & A, - "0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 " - "0.999952870000 1.0 1.0 1.0 1.0"); + DecisionTreeFactor expected3(D & C & B & A, + "0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 " + "0.999952870000 1.0 1.0 1.0 1.0"); maxNrAssignments = 5; auto pruned3 = factor.prune(maxNrAssignments); EXPECT(assert_equal(expected3, pruned3)); @@ -180,7 +180,7 @@ TEST(DecisionTreeFactor, DotWithNames) { DecisionTreeFactor f(A & B, "1 2 3 4 5 6"); auto formatter = [](Key key) { return key == 12 ? "A" : "B"; }; - for (bool showZero:{true, false}) { + for (bool showZero : {true, false}) { string actual = f.dot(formatter, showZero); // pretty weak test, as ids are pointers and not stable across platforms. 
string expected = "digraph G {"; From ae8d79cb3cc3c623ee919077173a84bbf6b5be94 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 23 Sep 2024 18:35:36 -0700 Subject: [PATCH 320/398] Add test for combine --- .../discrete/tests/testDecisionTreeFactor.cpp | 64 +++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/gtsam/discrete/tests/testDecisionTreeFactor.cpp b/gtsam/discrete/tests/testDecisionTreeFactor.cpp index e6501c3f7..a41d06c2b 100644 --- a/gtsam/discrete/tests/testDecisionTreeFactor.cpp +++ b/gtsam/discrete/tests/testDecisionTreeFactor.cpp @@ -174,6 +174,70 @@ TEST(DecisionTreeFactor, Prune) { EXPECT(assert_equal(expected3, pruned3)); } +/* ************************************************************************** */ +// Asia Bayes Network +/* ************************************************************************** */ + +#define DISABLE_DOT + +void maybeSaveDotFile(const DecisionTreeFactor& f, const string& filename) { +#ifndef DISABLE_DOT + std::vector names = {"A", "S", "T", "L", "B", "E", "X", "D"}; + auto formatter = [&](Key key) { return names[key]; }; + f.dot(filename, formatter, true); +#endif +} + +/** Convert Signature into CPT */ +DecisionTreeFactor create(const Signature& signature) { + DecisionTreeFactor p(signature.discreteKeys(), signature.cpt()); + return p; +} + +/* ************************************************************************* */ +// test Asia Joint +TEST(DecisionTreeFactor, joint) { + DiscreteKey A(0, 2), S(1, 2), T(2, 2), L(3, 2), B(4, 2), E(5, 2), X(6, 2), + D(7, 2); + + gttic_(asiaCPTs); + DecisionTreeFactor pA = create(A % "99/1"); + DecisionTreeFactor pS = create(S % "50/50"); + DecisionTreeFactor pT = create(T | A = "99/1 95/5"); + DecisionTreeFactor pL = create(L | S = "99/1 90/10"); + DecisionTreeFactor pB = create(B | S = "70/30 40/60"); + DecisionTreeFactor pE = create((E | T, L) = "F T T T"); + DecisionTreeFactor pX = create(X | E = "95/5 2/98"); + DecisionTreeFactor pD = create((D | E, B) = "9/1 2/8 
3/7 1/9"); + + // Create joint + gttic_(asiaJoint); + DecisionTreeFactor joint = pA; + maybeSaveDotFile(joint, "Asia-A"); + joint = joint * pS; + maybeSaveDotFile(joint, "Asia-AS"); + joint = joint * pT; + maybeSaveDotFile(joint, "Asia-AST"); + joint = joint * pL; + maybeSaveDotFile(joint, "Asia-ASTL"); + joint = joint * pB; + maybeSaveDotFile(joint, "Asia-ASTLB"); + joint = joint * pE; + maybeSaveDotFile(joint, "Asia-ASTLBE"); + joint = joint * pX; + maybeSaveDotFile(joint, "Asia-ASTLBEX"); + joint = joint * pD; + maybeSaveDotFile(joint, "Asia-ASTLBEXD"); + + // Check that discrete keys are as expected + EXPECT(assert_equal(joint.discreteKeys(), {A, S, T, L, B, E, X, D})); + + // Check that summing out variables maintains the keys even if merged, as is + // the case with S. + auto noAB = joint.sum(Ordering{A.first, B.first}); + EXPECT(assert_equal(noAB->discreteKeys(), {S, T, L, E, X, D})); +} + /* ************************************************************************* */ TEST(DecisionTreeFactor, DotWithNames) { DiscreteKey A(12, 3), B(5, 2); From 74c40556e312379b6b686fea51a4885b7a4382ef Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 23 Sep 2024 18:36:00 -0700 Subject: [PATCH 321/398] Make key generation more efficient --- gtsam/discrete/DecisionTreeFactor.cpp | 30 +++++++++++++-------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/gtsam/discrete/DecisionTreeFactor.cpp b/gtsam/discrete/DecisionTreeFactor.cpp index c56818448..d1b68f4bf 100644 --- a/gtsam/discrete/DecisionTreeFactor.cpp +++ b/gtsam/discrete/DecisionTreeFactor.cpp @@ -147,14 +147,14 @@ namespace gtsam { size_t i; ADT result(*this); for (i = 0; i < nrFrontals; i++) { - Key j = keys()[i]; + Key j = keys_[i]; result = result.combine(j, cardinality(j), op); } - // create new factor, note we start keys after nrFrontals + // Create new factor, note we start with keys after nrFrontals: DiscreteKeys dkeys; - for (; i < keys().size(); i++) { - Key j = keys()[i]; + for (; i < 
keys_.size(); i++) { + Key j = keys_[i]; dkeys.push_back(DiscreteKey(j, cardinality(j))); } return std::make_shared(dkeys, result); @@ -179,24 +179,22 @@ namespace gtsam { result = result.combine(j, cardinality(j), op); } - // create new factor, note we collect keys that are not in frontalKeys /* - Due to branch merging, the labels in `result` may be missing some keys + Create new factor, note we collect keys that are not in frontalKeys. + + Due to branch merging, the labels in `result` may be missing some keys. E.g. After branch merging, we may get a ADT like: Leaf [2] 1.0204082 - This is missing the key values used for branching. + Hence, code below traverses the original keys and omits those in + frontalKeys. We loop over cardinalities, which is O(n) even for a map, and + then "contains" is a binary search on a small vector. */ - KeyVector difference, frontalKeys_(frontalKeys), keys_(keys()); - // Get the difference of the frontalKeys and the factor keys using set_difference - std::sort(keys_.begin(), keys_.end()); - std::sort(frontalKeys_.begin(), frontalKeys_.end()); - std::set_difference(keys_.begin(), keys_.end(), frontalKeys_.begin(), - frontalKeys_.end(), back_inserter(difference)); - DiscreteKeys dkeys; - for (Key key : difference) { - dkeys.push_back(DiscreteKey(key, cardinality(key))); + for (auto&& [key, cardinality] : cardinalities_) { + if (!frontalKeys.contains(key)) { + dkeys.push_back(DiscreteKey(key, cardinality)); + } } return std::make_shared(dkeys, result); } From 2682cde25995630452f06ceb3ce32361ba056cb7 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 23 Sep 2024 22:57:05 -0400 Subject: [PATCH 322/398] re-add the gotos --- gtsam/linear/NoiseModel.cpp | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index 7cb93b17a..18a29f5b3 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -286,10 +286,10 @@ Diagonal::shared_ptr 
Diagonal::Variances(const Vector& variances, bool smart) { // check whether all the same entry if ((variances.array() == variances(0)).all()) { return Isotropic::Variance(variances.size(), variances(0), true); - } else { - return shared_ptr(new Diagonal(variances.cwiseSqrt())); - } + } else + goto full; } +full: return shared_ptr(new Diagonal(variances.cwiseSqrt())); } @@ -297,7 +297,7 @@ Diagonal::shared_ptr Diagonal::Variances(const Vector& variances, bool smart) { Diagonal::shared_ptr Diagonal::Sigmas(const Vector& sigmas, bool smart) { if (smart) { size_t n = sigmas.size(); - if (n == 0) return Diagonal::shared_ptr(new Diagonal(sigmas)); + if (n == 0) goto full; // look for zeros to make a constraint if ((sigmas.array() < 1e-8).any()) { @@ -309,6 +309,7 @@ Diagonal::shared_ptr Diagonal::Sigmas(const Vector& sigmas, bool smart) { return Isotropic::Sigma(n, sigmas(0), true); } } +full: return Diagonal::shared_ptr(new Diagonal(sigmas)); } From 82c25d887c21c31f3d4478732a6688c92ce11754 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Tue, 24 Sep 2024 11:23:50 -0700 Subject: [PATCH 323/398] Address comments on PR #1843 (cherry picked from commit 7e587e4eb396cadf67799e968cb0cf153af9750a) --- gtsam/hybrid/HybridNonlinearFactor.cpp | 9 ++------- gtsam/hybrid/HybridNonlinearFactor.h | 4 ++-- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp index a541722c4..f5be6ded8 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.cpp +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -22,7 +22,7 @@ namespace gtsam { /* *******************************************************************************/ static void checkKeys(const KeyVector& continuousKeys, - std::vector& pairs) { + const std::vector& pairs) { KeySet factor_keys_set; for (const auto& pair : pairs) { auto f = pair.first; @@ -55,14 +55,9 @@ HybridNonlinearFactor::HybridNonlinearFactor( /* 
*******************************************************************************/ HybridNonlinearFactor::HybridNonlinearFactor( const KeyVector& continuousKeys, const DiscreteKey& discreteKey, - const std::vector& factors) + const std::vector& pairs) : Base(continuousKeys, {discreteKey}) { - std::vector pairs; KeySet continuous_keys_set(continuousKeys.begin(), continuousKeys.end()); - KeySet factor_keys_set; - for (auto&& [f, val] : factors) { - pairs.emplace_back(f, val); - } checkKeys(continuousKeys, pairs); factors_ = FactorValuePairs({discreteKey}, pairs); } diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 766467518..7843afc83 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -106,11 +106,11 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor { * * @param continuousKeys Vector of keys for continuous factors. * @param discreteKey The discrete key for the "mode", indexing components. - * @param factors Vector of gaussian factor-scalar pairs, one per mode. + * @param pairs Vector of gaussian factor-scalar pairs, one per mode. */ HybridNonlinearFactor(const KeyVector& continuousKeys, const DiscreteKey& discreteKey, - const std::vector& factors); + const std::vector& pairs); /** * @brief Construct a new HybridNonlinearFactor on a several discrete keys M, From 6190e3d13ffec5b7435fd9e3d62a2a94a9d6fb9f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 25 Sep 2024 11:03:15 -0400 Subject: [PATCH 324/398] Revert "Fix compile error on Clang 18, tweak for better type generation" This reverts commit e3dd4e1704bc3a2cdcd3856adc151f38bdbd7407. 
--- gtsam/inference/Factor.h | 1 - python/CMakeLists.txt | 4 ++-- python/gtsam/gtsam.tpl | 6 +++--- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/gtsam/inference/Factor.h b/gtsam/inference/Factor.h index f073c4975..e357a9c88 100644 --- a/gtsam/inference/Factor.h +++ b/gtsam/inference/Factor.h @@ -25,7 +25,6 @@ #include #endif #include -#include #include #include diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 674f17d3f..404f5ecc0 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -257,7 +257,7 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}" - pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" --numpy-array-use-type-var gtsam_unstable + pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" gtsam_unstable DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_UNSTABLE_TARGET} WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/" ) @@ -284,7 +284,7 @@ add_custom_target( COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}" - pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" --numpy-array-use-type-var gtsam + pybind11-stubgen -o . 
--enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" gtsam DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_TARGET} WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/" ) diff --git a/python/gtsam/gtsam.tpl b/python/gtsam/gtsam.tpl index 6876b4ab4..c72a216a2 100644 --- a/python/gtsam/gtsam.tpl +++ b/python/gtsam/gtsam.tpl @@ -39,12 +39,12 @@ namespace py = pybind11; {module_def} {{ m_.doc() = "pybind11 wrapper of {module_name}"; -// Specializations for STL classes -#include "python/gtsam/specializations/{module_name}.h" - {submodules_init} {wrapped_namespace} +// Specializations for STL classes +#include "python/gtsam/specializations/{module_name}.h" + }} From 92c7f51db53abeb27755e46fa0bd9a04013fb5d0 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 25 Sep 2024 11:06:51 -0400 Subject: [PATCH 325/398] Revert "Use pybind11-stubgen for generating stubs" This reverts commit 30c789dbdb54bacc8ded04b557416a7911b01229. --- python/CMakeLists.txt | 51 +++++++++++-------------------------- python/dev_requirements.txt | 2 +- 2 files changed, 16 insertions(+), 37 deletions(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 404f5ecc0..43f8d78bd 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -171,7 +171,6 @@ file(COPY "${GTSAM_SOURCE_DIR}/examples/Data" DESTINATION "${GTSAM_MODULE_PATH}" # Add gtsam as a dependency to the install target set(GTSAM_PYTHON_DEPENDENCIES ${GTSAM_PYTHON_TARGET}) -set(GTSAM_PYTHON_INSTALL_EXTRA "") if(GTSAM_UNSTABLE_BUILD_PYTHON) set(ignore @@ -251,22 +250,6 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) VERBATIM ) endif() - - add_custom_target( - python-unstable-stubs - COMMAND - ${CMAKE_COMMAND} -E env - "PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}" - pybind11-stubgen -o . 
--enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" gtsam_unstable - DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_UNSTABLE_TARGET} - WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/" - ) - - if(NOT WIN32) - # Add the stubgen target as a dependency to the install target - list(APPEND GTSAM_PYTHON_INSTALL_EXTRA python-unstable-stubs) - endif() - # Custom make command to run all GTSAM_UNSTABLE Python tests add_custom_target( python-test-unstable @@ -279,30 +262,26 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) ) endif() -add_custom_target( - python-stubs - COMMAND - ${CMAKE_COMMAND} -E env - "PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}" - pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" gtsam - DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_TARGET} - WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/" -) - -if(NOT WIN32) - # Add the stubgen target as a dependency to the install target - list(APPEND GTSAM_PYTHON_INSTALL_EXTRA python-stubs) -endif() - # Add custom target so we can install with `make python-install` # Note below we make sure to install with --user iff not in a virtualenv set(GTSAM_PYTHON_INSTALL_TARGET python-install) - -add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} - COMMAND ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])" - DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_INSTALL_EXTRA} +#TODO(Varun) Maybe move the long command to script? 
+# https://stackoverflow.com/questions/49053544/how-do-i-run-a-python-script-every-time-in-a-cmake-build +if (NOT WIN32) # WIN32=1 is target platform is Windows + add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} + COMMAND stubgen -q -p gtsam && cp -a out/gtsam/ gtsam && ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])" + DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY} VERBATIM) +else() + #TODO(Varun) Find equivalent cp on Windows + add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} + COMMAND ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])" + DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} + WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY} + VERBATIM) +endif() + # Custom make command to run all GTSAM Python tests add_custom_target( diff --git a/python/dev_requirements.txt b/python/dev_requirements.txt index 94ab40189..6a5e7f924 100644 --- a/python/dev_requirements.txt +++ b/python/dev_requirements.txt @@ -1,3 +1,3 @@ -r requirements.txt pyparsing>=2.4.2 -pybind11-stubgen>=2.5.1 \ No newline at end of file +mypy==1.4.1 #TODO(Varun) A bug in mypy>=1.5.0 causes an unresolved placeholder error when importing numpy>=2.0.0 (https://github.com/python/mypy/issues/17396) \ No newline at end of file From 798db544a4dc90e3b10610ee2ca1e5d3b8e661b5 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 25 Sep 2024 11:08:00 -0400 Subject: [PATCH 326/398] keep include --- gtsam/inference/Factor.h | 7 ++++--- 1 file changed, 4 
insertions(+), 3 deletions(-) diff --git a/gtsam/inference/Factor.h b/gtsam/inference/Factor.h index e357a9c88..074151e16 100644 --- a/gtsam/inference/Factor.h +++ b/gtsam/inference/Factor.h @@ -24,12 +24,13 @@ #ifdef GTSAM_ENABLE_BOOST_SERIALIZATION #include #endif -#include - -#include #include +#include #include +#include +#include + namespace gtsam { /// Define collection types: From 832ef7725bd2ce0aacd1b2e570614a77a5ec59cf Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 25 Sep 2024 11:28:38 -0400 Subject: [PATCH 327/398] wrap TriangulationFactor --- gtsam/slam/slam.i | 43 +++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 41 insertions(+), 2 deletions(-) diff --git a/gtsam/slam/slam.i b/gtsam/slam/slam.i index f054aaab4..97dcfcae7 100644 --- a/gtsam/slam/slam.i +++ b/gtsam/slam/slam.i @@ -347,11 +347,50 @@ virtual class FrobeniusBetweenFactor : gtsam::NoiseModelFactor { gtsam::Vector evaluateError(const T& R1, const T& R2); }; - + +#include +template +virtual class TriangulationFactor : gtsam::NoiseModelFactor { + TriangulationFactor(); + TriangulationFactor(const CAMERA& camera, const gtsam::This::Measurement& measured, + const gtsam::noiseModel::Base* model, gtsam::Key pointKey, + bool throwCheirality = false, + bool verboseCheirality = false); + + void print(const string& s = "", const gtsam::KeyFormatter& keyFormatter = + gtsam::DefaultKeyFormatter) const; + bool equals(const This& p, double tol = 1e-9) const; + + gtsam::Vector evaluateError(const gtsam::Point3& point) const; + + const gtsam::This::Measurement& measured() const; +}; +typedef gtsam::TriangulationFactor> + TriangulationFactorCal3_S2; +typedef gtsam::TriangulationFactor> + TriangulationFactorCal3DS2; +typedef gtsam::TriangulationFactor> + TriangulationFactorCal3Bundler; +typedef gtsam::TriangulationFactor> + TriangulationFactorCal3Fisheye; +typedef gtsam::TriangulationFactor> + TriangulationFactorCal3Unified; + +typedef gtsam::TriangulationFactor> + 
TriangulationFactorPoseCal3_S2; +typedef gtsam::TriangulationFactor> + TriangulationFactorPoseCal3DS2; +typedef gtsam::TriangulationFactor> + TriangulationFactorPoseCal3Bundler; +typedef gtsam::TriangulationFactor> + TriangulationFactorPoseCal3Fisheye; +typedef gtsam::TriangulationFactor> + TriangulationFactorPoseCal3Unified; + #include namespace lago { gtsam::Values initialize(const gtsam::NonlinearFactorGraph& graph, bool useOdometricPath = true); gtsam::Values initialize(const gtsam::NonlinearFactorGraph& graph, const gtsam::Values& initialGuess); } - + } // namespace gtsam From e2fba7a85cf4cbd9b652768becc495b4db0b1d00 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 25 Sep 2024 11:28:54 -0400 Subject: [PATCH 328/398] expose Measurement type in TriangulationFactor --- gtsam/slam/TriangulationFactor.h | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/gtsam/slam/TriangulationFactor.h b/gtsam/slam/TriangulationFactor.h index 32aa590a6..3b8486a59 100644 --- a/gtsam/slam/TriangulationFactor.h +++ b/gtsam/slam/TriangulationFactor.h @@ -34,6 +34,8 @@ public: /// CAMERA type using Camera = CAMERA; + /// shorthand for measurement type, e.g. Point2 or StereoPoint2 + using Measurement = typename CAMERA::Measurement; protected: @@ -43,9 +45,6 @@ protected: /// shorthand for this class using This = TriangulationFactor; - /// shorthand for measurement type, e.g. 
Point2 or StereoPoint2 - using Measurement = typename CAMERA::Measurement; - // Keep a copy of measurement and calibration for I/O const CAMERA camera_; ///< CAMERA in which this landmark was seen const Measurement measured_; ///< 2D measurement From 9243655e3b612a94892c44c7a8193d53de74e689 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 25 Sep 2024 14:13:14 -0400 Subject: [PATCH 329/398] simplify --- gtsam/linear/NoiseModel.cpp | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index 18a29f5b3..7ee2158be 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -282,15 +282,10 @@ Diagonal::Diagonal(const Vector& sigmas) /* ************************************************************************* */ Diagonal::shared_ptr Diagonal::Variances(const Vector& variances, bool smart) { - if (smart) { - // check whether all the same entry - if ((variances.array() == variances(0)).all()) { - return Isotropic::Variance(variances.size(), variances(0), true); - } else - goto full; - } -full: - return shared_ptr(new Diagonal(variances.cwiseSqrt())); + // check whether all the same entry + return (smart && (variances.array() == variances(0)).all()) + ? 
Isotropic::Variance(variances.size(), variances(0), true) + : shared_ptr(new Diagonal(variances.cwiseSqrt())); } /* ************************************************************************* */ From 569eeb9202c3e15be73857ac684956efea211f72 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 25 Sep 2024 16:45:06 -0400 Subject: [PATCH 330/398] update Config.cmake.in to conditionally include Boost --- cmake/Config.cmake.in | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/cmake/Config.cmake.in b/cmake/Config.cmake.in index 36906d090..338ff8500 100644 --- a/cmake/Config.cmake.in +++ b/cmake/Config.cmake.in @@ -15,10 +15,12 @@ endif() # Find dependencies, required by cmake exported targets: include(CMakeFindDependencyMacro) # Allow using cmake < 3.8 -if(${CMAKE_VERSION} VERSION_LESS "3.8.0") -find_package(Boost @BOOST_FIND_MINIMUM_VERSION@ COMPONENTS @BOOST_FIND_MINIMUM_COMPONENTS@) -else() -find_dependency(Boost @BOOST_FIND_MINIMUM_VERSION@ COMPONENTS @BOOST_FIND_MINIMUM_COMPONENTS@) +if (@GTSAM_ENABLE_BOOST_SERIALIZATION@ OR @GTSAM_USE_BOOST_FEATURES@) + if(${CMAKE_VERSION} VERSION_LESS "3.8.0") + find_package(Boost @BOOST_FIND_MINIMUM_VERSION@ COMPONENTS @BOOST_FIND_MINIMUM_COMPONENTS@) + else() + find_dependency(Boost @BOOST_FIND_MINIMUM_VERSION@ COMPONENTS @BOOST_FIND_MINIMUM_COMPONENTS@) + endif() endif() if(@GTSAM_USE_TBB@) From 06887b702ab9f03852e7126b223135dd071b88c0 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 25 Sep 2024 16:10:24 -0700 Subject: [PATCH 331/398] Moved mixture model tests to its own file --- gtsam/hybrid/tests/testGaussianMixture.cpp | 239 ++++++++++++++++++ .../hybrid/tests/testHybridGaussianFactor.cpp | 187 -------------- 2 files changed, 239 insertions(+), 187 deletions(-) create mode 100644 gtsam/hybrid/tests/testGaussianMixture.cpp diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp new file mode 100644 index 000000000..7f2bd99f4 --- /dev/null +++ 
b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -0,0 +1,239 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridGaussianFactor.cpp + * @brief Unit tests for HybridGaussianFactor + * @author Varun Agrawal + * @author Fan Jiang + * @author Frank Dellaert + * @date December 2021 + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// Include for test suite +#include + +#include + +using namespace std; +using namespace gtsam; +using symbol_shorthand::M; +using symbol_shorthand::X; +using symbol_shorthand::Z; + +namespace test_gmm { + +/** + * Function to compute P(m=1|z). For P(m=0|z), swap mus and sigmas. + * If sigma0 == sigma1, it simplifies to a sigmoid function. + * + * Follows equation 7.108 since it is more generic. 
+ */ +double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, + double z) { + double x1 = ((z - mu0) / sigma0), x2 = ((z - mu1) / sigma1); + double d = sigma1 / sigma0; + double e = d * std::exp(-0.5 * (x1 * x1 - x2 * x2)); + return 1 / (1 + e); +}; + +static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, + double sigma0, double sigma1) { + DiscreteKey m(M(0), 2); + Key z = Z(0); + + auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); + auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); + + auto c0 = make_shared(z, Vector1(mu0), I_1x1, model0), + c1 = make_shared(z, Vector1(mu1), I_1x1, model1); + + HybridBayesNet hbn; + DiscreteKeys discreteParents{m}; + hbn.emplace_shared( + KeyVector{z}, KeyVector{}, discreteParents, + HybridGaussianConditional::Conditionals(discreteParents, + std::vector{c0, c1})); + + auto mixing = make_shared(m, "50/50"); + hbn.push_back(mixing); + + return hbn; +} + +} // namespace test_gmm + +/* ************************************************************************* */ +/** + * Test a simple Gaussian Mixture Model represented as P(m)P(z|m) + * where m is a discrete variable and z is a continuous variable. + * m is binary and depending on m, we have 2 different means + * μ1 and μ2 for the Gaussian distribution around which we sample z. + * + * The resulting factor graph should eliminate to a Bayes net + * which represents a sigmoid function. + */ +TEST(HybridGaussianFactor, GaussianMixtureModel) { + using namespace test_gmm; + + double mu0 = 1.0, mu1 = 3.0; + double sigma = 2.0; + + DiscreteKey m(M(0), 2); + Key z = Z(0); + + auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma, sigma); + + // The result should be a sigmoid. 
+ // So should be P(m=1|z) = 0.5 at z=3.0 - 1.0=2.0 + double midway = mu1 - mu0, lambda = 4; + { + VectorValues given; + given.insert(z, Vector1(midway)); + + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + EXPECT_DOUBLES_EQUAL( + prob_m_z(mu0, mu1, sigma, sigma, midway), + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), + 1e-8); + + // At the halfway point between the means, we should get P(m|z)=0.5 + HybridBayesNet expected; + expected.emplace_shared(m, "50/50"); + + EXPECT(assert_equal(expected, *bn)); + } + { + // Shift by -lambda + VectorValues given; + given.insert(z, Vector1(midway - lambda)); + + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + EXPECT_DOUBLES_EQUAL( + prob_m_z(mu0, mu1, sigma, sigma, midway - lambda), + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), + 1e-8); + } + { + // Shift by lambda + VectorValues given; + given.insert(z, Vector1(midway + lambda)); + + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + EXPECT_DOUBLES_EQUAL( + prob_m_z(mu0, mu1, sigma, sigma, midway + lambda), + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), + 1e-8); + } +} + +/* ************************************************************************* */ +/** + * Test a simple Gaussian Mixture Model represented as P(m)P(z|m) + * where m is a discrete variable and z is a continuous variable. + * m is binary and depending on m, we have 2 different means + * and covariances each for the + * Gaussian distribution around which we sample z. + * + * The resulting factor graph should eliminate to a Bayes net + * which represents a Gaussian-like function + * where m1>m0 close to 3.1333. 
+ */ +TEST(HybridGaussianFactor, GaussianMixtureModel2) { + using namespace test_gmm; + + double mu0 = 1.0, mu1 = 3.0; + double sigma0 = 8.0, sigma1 = 4.0; + + DiscreteKey m(M(0), 2); + Key z = Z(0); + + auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma0, sigma1); + + double m1_high = 3.133, lambda = 4; + { + // The result should be a bell curve like function + // with m1 > m0 close to 3.1333. + // We get 3.1333 by finding the maximum value of the function. + VectorValues given; + given.insert(z, Vector1(3.133)); + + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + EXPECT_DOUBLES_EQUAL( + prob_m_z(mu0, mu1, sigma0, sigma1, m1_high), + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8); + + // At the halfway point between the means + HybridBayesNet expected; + expected.emplace_shared( + m, DiscreteKeys{}, + vector{prob_m_z(mu1, mu0, sigma1, sigma0, m1_high), + prob_m_z(mu0, mu1, sigma0, sigma1, m1_high)}); + + EXPECT(assert_equal(expected, *bn)); + } + { + // Shift by -lambda + VectorValues given; + given.insert(z, Vector1(m1_high - lambda)); + + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + EXPECT_DOUBLES_EQUAL( + prob_m_z(mu0, mu1, sigma0, sigma1, m1_high - lambda), + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), + 1e-8); + } + { + // Shift by lambda + VectorValues given; + given.insert(z, Vector1(m1_high + lambda)); + + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + EXPECT_DOUBLES_EQUAL( + prob_m_z(mu0, mu1, sigma0, sigma1, m1_high + lambda), + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), + 1e-8); + } +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* 
************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index fff8e48fb..55c3bccfb 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -213,193 +213,6 @@ TEST(HybridGaussianFactor, Error) { 4.0, hybridFactor.error({continuousValues, discreteValues}), 1e-9); } -namespace test_gmm { - -/** - * Function to compute P(m=1|z). For P(m=0|z), swap mus and sigmas. - * If sigma0 == sigma1, it simplifies to a sigmoid function. - * - * Follows equation 7.108 since it is more generic. - */ -double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, - double z) { - double x1 = ((z - mu0) / sigma0), x2 = ((z - mu1) / sigma1); - double d = sigma1 / sigma0; - double e = d * std::exp(-0.5 * (x1 * x1 - x2 * x2)); - return 1 / (1 + e); -}; - -static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, - double sigma0, double sigma1) { - DiscreteKey m(M(0), 2); - Key z = Z(0); - - auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); - auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); - - auto c0 = make_shared(z, Vector1(mu0), I_1x1, model0), - c1 = make_shared(z, Vector1(mu1), I_1x1, model1); - - HybridBayesNet hbn; - DiscreteKeys discreteParents{m}; - hbn.emplace_shared( - KeyVector{z}, KeyVector{}, discreteParents, - HybridGaussianConditional::Conditionals(discreteParents, - std::vector{c0, c1})); - - auto mixing = make_shared(m, "50/50"); - hbn.push_back(mixing); - - return hbn; -} - -} // namespace test_gmm - -/* ************************************************************************* */ -/** - * Test a simple Gaussian Mixture Model represented as P(m)P(z|m) - * where m is a discrete variable and z is a continuous variable. - * m is binary and depending on m, we have 2 different means - * μ1 and μ2 for the Gaussian distribution around which we sample z. 
- * - * The resulting factor graph should eliminate to a Bayes net - * which represents a sigmoid function. - */ -TEST(HybridGaussianFactor, GaussianMixtureModel) { - using namespace test_gmm; - - double mu0 = 1.0, mu1 = 3.0; - double sigma = 2.0; - - DiscreteKey m(M(0), 2); - Key z = Z(0); - - auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma, sigma); - - // The result should be a sigmoid. - // So should be P(m=1|z) = 0.5 at z=3.0 - 1.0=2.0 - double midway = mu1 - mu0, lambda = 4; - { - VectorValues given; - given.insert(z, Vector1(midway)); - - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - EXPECT_DOUBLES_EQUAL( - prob_m_z(mu0, mu1, sigma, sigma, midway), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), - 1e-8); - - // At the halfway point between the means, we should get P(m|z)=0.5 - HybridBayesNet expected; - expected.emplace_shared(m, "50/50"); - - EXPECT(assert_equal(expected, *bn)); - } - { - // Shift by -lambda - VectorValues given; - given.insert(z, Vector1(midway - lambda)); - - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - EXPECT_DOUBLES_EQUAL( - prob_m_z(mu0, mu1, sigma, sigma, midway - lambda), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), - 1e-8); - } - { - // Shift by lambda - VectorValues given; - given.insert(z, Vector1(midway + lambda)); - - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - EXPECT_DOUBLES_EQUAL( - prob_m_z(mu0, mu1, sigma, sigma, midway + lambda), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), - 1e-8); - } -} - -/* ************************************************************************* */ -/** - * Test a simple Gaussian Mixture Model represented as P(m)P(z|m) - * where m is a discrete variable and z is a continuous variable. 
- * m is binary and depending on m, we have 2 different means - * and covariances each for the - * Gaussian distribution around which we sample z. - * - * The resulting factor graph should eliminate to a Bayes net - * which represents a Gaussian-like function - * where m1>m0 close to 3.1333. - */ -TEST(HybridGaussianFactor, GaussianMixtureModel2) { - using namespace test_gmm; - - double mu0 = 1.0, mu1 = 3.0; - double sigma0 = 8.0, sigma1 = 4.0; - - DiscreteKey m(M(0), 2); - Key z = Z(0); - - auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma0, sigma1); - - double m1_high = 3.133, lambda = 4; - { - // The result should be a bell curve like function - // with m1 > m0 close to 3.1333. - // We get 3.1333 by finding the maximum value of the function. - VectorValues given; - given.insert(z, Vector1(3.133)); - - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - EXPECT_DOUBLES_EQUAL( - prob_m_z(mu0, mu1, sigma0, sigma1, m1_high), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8); - - // At the halfway point between the means - HybridBayesNet expected; - expected.emplace_shared( - m, DiscreteKeys{}, - vector{prob_m_z(mu1, mu0, sigma1, sigma0, m1_high), - prob_m_z(mu0, mu1, sigma0, sigma1, m1_high)}); - - EXPECT(assert_equal(expected, *bn)); - } - { - // Shift by -lambda - VectorValues given; - given.insert(z, Vector1(m1_high - lambda)); - - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - EXPECT_DOUBLES_EQUAL( - prob_m_z(mu0, mu1, sigma0, sigma1, m1_high - lambda), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), - 1e-8); - } - { - // Shift by lambda - VectorValues given; - given.insert(z, Vector1(m1_high + lambda)); - - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - EXPECT_DOUBLES_EQUAL( - prob_m_z(mu0, mu1, 
sigma0, sigma1, m1_high + lambda), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), - 1e-8); - } -} - namespace test_two_state_estimation { DiscreteKey m1(M(1), 2); From 314a8310cfea1eabc432c78731171a727496d738 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 25 Sep 2024 16:55:12 -0700 Subject: [PATCH 332/398] Simplify tests --- gtsam/hybrid/tests/testGaussianMixture.cpp | 192 ++++++--------------- 1 file changed, 53 insertions(+), 139 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index 7f2bd99f4..e976994f8 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -44,39 +44,39 @@ using symbol_shorthand::M; using symbol_shorthand::X; using symbol_shorthand::Z; -namespace test_gmm { - /** - * Function to compute P(m=1|z). For P(m=0|z), swap mus and sigmas. + * Closed form computation of P(m=1|z). * If sigma0 == sigma1, it simplifies to a sigmoid function. - * - * Follows equation 7.108 since it is more generic. */ -double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, - double z) { +static double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, + double z) { double x1 = ((z - mu0) / sigma0), x2 = ((z - mu1) / sigma1); double d = sigma1 / sigma0; double e = d * std::exp(-0.5 * (x1 * x1 - x2 * x2)); return 1 / (1 + e); }; +// Define mode key and an assignment m==1 +static const DiscreteKey m(M(0), 2); +static const DiscreteValues m1Assignment{{M(0), 1}}; + +/** + * Create a simple Gaussian Mixture Model represented as p(z|m)P(m) + * where m is a discrete variable and z is a continuous variable. + * The "mode" m is binary and depending on m, we have 2 different means + * μ1 and μ2 for the Gaussian density p(z|m). 
+ */ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, double sigma0, double sigma1) { - DiscreteKey m(M(0), 2); - Key z = Z(0); - auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); - auto c0 = make_shared(z, Vector1(mu0), I_1x1, model0), - c1 = make_shared(z, Vector1(mu1), I_1x1, model1); + auto c0 = make_shared(Z(0), Vector1(mu0), I_1x1, model0), + c1 = make_shared(Z(0), Vector1(mu1), I_1x1, model1); HybridBayesNet hbn; - DiscreteKeys discreteParents{m}; - hbn.emplace_shared( - KeyVector{z}, KeyVector{}, discreteParents, - HybridGaussianConditional::Conditionals(discreteParents, - std::vector{c0, c1})); + hbn.emplace_shared(KeyVector{Z(0)}, KeyVector{}, m, + std::vector{c0, c1}); auto mixing = make_shared(m, "50/50"); hbn.push_back(mixing); @@ -84,150 +84,64 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, return hbn; } -} // namespace test_gmm +/// Given p(z,m) and z, use eliminate to obtain P(m|z). +static DiscreteConditional solveForMeasurement(const HybridBayesNet &hbn, + double z) { + VectorValues given; + given.insert(Z(0), Vector1(z)); -/* ************************************************************************* */ -/** - * Test a simple Gaussian Mixture Model represented as P(m)P(z|m) - * where m is a discrete variable and z is a continuous variable. - * m is binary and depending on m, we have 2 different means - * μ1 and μ2 for the Gaussian distribution around which we sample z. - * - * The resulting factor graph should eliminate to a Bayes net - * which represents a sigmoid function. + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + return *gfg.eliminateSequential()->at(0)->asDiscrete(); +} + +/* + * Test a Gaussian Mixture Model P(m)p(z|m) with same sigma. + * The posterior, as a function of z, should be a sigmoid function. 
*/ TEST(HybridGaussianFactor, GaussianMixtureModel) { - using namespace test_gmm; - double mu0 = 1.0, mu1 = 3.0; double sigma = 2.0; - DiscreteKey m(M(0), 2); - Key z = Z(0); - auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma, sigma); - // The result should be a sigmoid. - // So should be P(m=1|z) = 0.5 at z=3.0 - 1.0=2.0 - double midway = mu1 - mu0, lambda = 4; - { - VectorValues given; - given.insert(z, Vector1(midway)); + // At the halfway point between the means, we should get P(m|z)=0.5 + double midway = mu1 - mu0; + auto pMid = solveForMeasurement(hbn, midway); + EXPECT(assert_equal(DiscreteConditional(m, "50/50"), pMid)); - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + // Everywhere else, the result should be a sigmoid. + for (const double shift : {-4, -2, 0, 2, 4}) { + const double z = midway + shift; + const double expected = prob_m_z(mu0, mu1, sigma, sigma, z); - EXPECT_DOUBLES_EQUAL( - prob_m_z(mu0, mu1, sigma, sigma, midway), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), - 1e-8); - - // At the halfway point between the means, we should get P(m|z)=0.5 - HybridBayesNet expected; - expected.emplace_shared(m, "50/50"); - - EXPECT(assert_equal(expected, *bn)); - } - { - // Shift by -lambda - VectorValues given; - given.insert(z, Vector1(midway - lambda)); - - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - EXPECT_DOUBLES_EQUAL( - prob_m_z(mu0, mu1, sigma, sigma, midway - lambda), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), - 1e-8); - } - { - // Shift by lambda - VectorValues given; - given.insert(z, Vector1(midway + lambda)); - - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - EXPECT_DOUBLES_EQUAL( - prob_m_z(mu0, mu1, sigma, sigma, midway + lambda), - 
bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), - 1e-8); + auto posterior = solveForMeasurement(hbn, z); + EXPECT_DOUBLES_EQUAL(expected, posterior(m1Assignment), 1e-8); } } -/* ************************************************************************* */ -/** - * Test a simple Gaussian Mixture Model represented as P(m)P(z|m) - * where m is a discrete variable and z is a continuous variable. - * m is binary and depending on m, we have 2 different means - * and covariances each for the - * Gaussian distribution around which we sample z. - * - * The resulting factor graph should eliminate to a Bayes net - * which represents a Gaussian-like function - * where m1>m0 close to 3.1333. +/* + * Test a Gaussian Mixture Model P(m)p(z|m) with different sigmas. + * The posterior, as a function of z, should be a unimodal function. */ TEST(HybridGaussianFactor, GaussianMixtureModel2) { - using namespace test_gmm; - double mu0 = 1.0, mu1 = 3.0; double sigma0 = 8.0, sigma1 = 4.0; - DiscreteKey m(M(0), 2); - Key z = Z(0); - auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma0, sigma1); - double m1_high = 3.133, lambda = 4; - { - // The result should be a bell curve like function - // with m1 > m0 close to 3.1333. - // We get 3.1333 by finding the maximum value of the function. - VectorValues given; - given.insert(z, Vector1(3.133)); + // We get zMax=3.1333 by finding the maximum value of the function, at which + // point the mode m==1 is about twice as probable as m==0. + double zMax = 3.133; + auto pMax = solveForMeasurement(hbn, zMax); + EXPECT(assert_equal(DiscreteConditional(m, "32.56/67.44"), pMax, 1e-5)); - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + // Everywhere else, the result should be a bell curve like function. 
+ for (const double shift : {-4, -2, 0, 2, 4}) { + const double z = zMax + shift; + const double expected = prob_m_z(mu0, mu1, sigma0, sigma1, z); - EXPECT_DOUBLES_EQUAL( - prob_m_z(mu0, mu1, sigma0, sigma1, m1_high), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8); - - // At the halfway point between the means - HybridBayesNet expected; - expected.emplace_shared( - m, DiscreteKeys{}, - vector{prob_m_z(mu1, mu0, sigma1, sigma0, m1_high), - prob_m_z(mu0, mu1, sigma0, sigma1, m1_high)}); - - EXPECT(assert_equal(expected, *bn)); - } - { - // Shift by -lambda - VectorValues given; - given.insert(z, Vector1(m1_high - lambda)); - - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - EXPECT_DOUBLES_EQUAL( - prob_m_z(mu0, mu1, sigma0, sigma1, m1_high - lambda), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), - 1e-8); - } - { - // Shift by lambda - VectorValues given; - given.insert(z, Vector1(m1_high + lambda)); - - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - EXPECT_DOUBLES_EQUAL( - prob_m_z(mu0, mu1, sigma0, sigma1, m1_high + lambda), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), - 1e-8); + auto posterior = solveForMeasurement(hbn, z); + EXPECT_DOUBLES_EQUAL(expected, posterior(m1Assignment), 1e-8); } } From a4a0a2a4247bb6ba31b2b6b47d744e457c3b8e1d Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 25 Sep 2024 18:48:01 -0700 Subject: [PATCH 333/398] Test different workflows --- gtsam/hybrid/tests/testGaussianMixture.cpp | 122 +++++++++++---------- 1 file changed, 67 insertions(+), 55 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index e976994f8..c6263c44d 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -10,56 +10,35 @@ * 
-------------------------------------------------------------------------- */ /** - * @file testHybridGaussianFactor.cpp - * @brief Unit tests for HybridGaussianFactor + * @file testGaussianMixture.cpp + * @brief test hybrid elimination with a simple mixture model * @author Varun Agrawal - * @author Fan Jiang * @author Frank Dellaert - * @date December 2021 + * @date September 2024 */ -#include -#include #include -#include #include -#include -#include -#include -#include #include -#include -#include -#include -#include // Include for test suite #include -#include - -using namespace std; using namespace gtsam; using symbol_shorthand::M; -using symbol_shorthand::X; using symbol_shorthand::Z; -/** - * Closed form computation of P(m=1|z). - * If sigma0 == sigma1, it simplifies to a sigmoid function. - */ -static double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, - double z) { - double x1 = ((z - mu0) / sigma0), x2 = ((z - mu1) / sigma1); - double d = sigma1 / sigma0; - double e = d * std::exp(-0.5 * (x1 * x1 - x2 * x2)); - return 1 / (1 + e); -}; - // Define mode key and an assignment m==1 static const DiscreteKey m(M(0), 2); static const DiscreteValues m1Assignment{{M(0), 1}}; +// Define a 50/50 prior on the mode +DiscreteConditional::shared_ptr mixing = + std::make_shared(m, "60/40"); + +// define Continuous keys +static const KeyVector continuousKeys{Z(0)}; + /** * Create a simple Gaussian Mixture Model represented as p(z|m)P(m) * where m is a discrete variable and z is a continuous variable. 
@@ -68,30 +47,45 @@ static const DiscreteValues m1Assignment{{M(0), 1}}; */ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, double sigma0, double sigma1) { + HybridBayesNet hbn; auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); - - auto c0 = make_shared(Z(0), Vector1(mu0), I_1x1, model0), - c1 = make_shared(Z(0), Vector1(mu1), I_1x1, model1); - - HybridBayesNet hbn; - hbn.emplace_shared(KeyVector{Z(0)}, KeyVector{}, m, + auto c0 = std::make_shared(Z(0), Vector1(mu0), I_1x1, + model0), + c1 = std::make_shared(Z(0), Vector1(mu1), I_1x1, + model1); + hbn.emplace_shared(continuousKeys, KeyVector{}, m, std::vector{c0, c1}); - - auto mixing = make_shared(m, "50/50"); hbn.push_back(mixing); - return hbn; } -/// Given p(z,m) and z, use eliminate to obtain P(m|z). -static DiscreteConditional solveForMeasurement(const HybridBayesNet &hbn, - double z) { - VectorValues given; - given.insert(Z(0), Vector1(z)); +/// Gaussian density function +double Gaussian(double mu, double sigma, double z) { + return exp(-0.5 * pow((z - mu) / sigma, 2)) / sqrt(2 * M_PI * sigma * sigma); +}; - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - return *gfg.eliminateSequential()->at(0)->asDiscrete(); +/** + * Closed form computation of P(m=1|z). + * If sigma0 == sigma1, it simplifies to a sigmoid function. + * Hardcodes 60/40 prior on mode. + */ +static double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, + double z) { + const double p0 = 0.6 * Gaussian(mu0, sigma0, z); + const double p1 = 0.4 * Gaussian(mu1, sigma1, z); + return p1 / (p0 + p1); +}; + +/// Given \phi(m;z)\phi(m) use eliminate to obtain P(m|z). +static DiscreteConditional solveHFG(const HybridGaussianFactorGraph &hfg) { + return *hfg.eliminateSequential()->at(0)->asDiscrete(); +} + +/// Given p(z,m) and z, convert to HFG and solve. 
+static DiscreteConditional solveHBN(const HybridBayesNet &hbn, double z) { + VectorValues given{{Z(0), Vector1(z)}}; + return solveHFG(hbn.toFactorGraph(given)); } /* @@ -106,16 +100,25 @@ TEST(HybridGaussianFactor, GaussianMixtureModel) { // At the halfway point between the means, we should get P(m|z)=0.5 double midway = mu1 - mu0; - auto pMid = solveForMeasurement(hbn, midway); - EXPECT(assert_equal(DiscreteConditional(m, "50/50"), pMid)); + auto pMid = solveHBN(hbn, midway); + EXPECT(assert_equal(DiscreteConditional(m, "60/40"), pMid)); // Everywhere else, the result should be a sigmoid. for (const double shift : {-4, -2, 0, 2, 4}) { const double z = midway + shift; const double expected = prob_m_z(mu0, mu1, sigma, sigma, z); - auto posterior = solveForMeasurement(hbn, z); - EXPECT_DOUBLES_EQUAL(expected, posterior(m1Assignment), 1e-8); + // Workflow 1: convert HBN to HFG and solve + auto posterior1 = solveHBN(hbn, z); + EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8); + + // Workflow 2: directly specify HFG and solve + HybridGaussianFactorGraph hfg1; + hfg1.emplace_shared( + m, std::vector{Gaussian(mu0, sigma, z), Gaussian(mu1, sigma, z)}); + hfg1.push_back(mixing); + auto posterior2 = solveHFG(hfg1); + EXPECT_DOUBLES_EQUAL(expected, posterior2(m1Assignment), 1e-8); } } @@ -132,16 +135,25 @@ TEST(HybridGaussianFactor, GaussianMixtureModel2) { // We get zMax=3.1333 by finding the maximum value of the function, at which // point the mode m==1 is about twice as probable as m==0. double zMax = 3.133; - auto pMax = solveForMeasurement(hbn, zMax); - EXPECT(assert_equal(DiscreteConditional(m, "32.56/67.44"), pMax, 1e-5)); + auto pMax = solveHBN(hbn, zMax); + EXPECT(assert_equal(DiscreteConditional(m, "42/58"), pMax, 1e-4)); // Everywhere else, the result should be a bell curve like function. 
for (const double shift : {-4, -2, 0, 2, 4}) { const double z = zMax + shift; const double expected = prob_m_z(mu0, mu1, sigma0, sigma1, z); - auto posterior = solveForMeasurement(hbn, z); - EXPECT_DOUBLES_EQUAL(expected, posterior(m1Assignment), 1e-8); + // Workflow 1: convert HBN to HFG and solve + auto posterior1 = solveHBN(hbn, z); + EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8); + + // Workflow 2: directly specify HFG and solve + HybridGaussianFactorGraph hfg; + hfg.emplace_shared( + m, std::vector{Gaussian(mu0, sigma0, z), Gaussian(mu1, sigma1, z)}); + hfg.push_back(mixing); + auto posterior2 = solveHFG(hfg); + EXPECT_DOUBLES_EQUAL(expected, posterior2(m1Assignment), 1e-8); } } From 4d10c1462b1d7966071384b4b877c50e4d187ad6 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 25 Sep 2024 18:54:53 -0700 Subject: [PATCH 334/398] Include what you use --- gtsam/hybrid/tests/testGaussianMixture.cpp | 49 ++++++++++++---------- 1 file changed, 28 insertions(+), 21 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index c6263c44d..be4ba2ff4 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -11,15 +11,22 @@ /** * @file testGaussianMixture.cpp - * @brief test hybrid elimination with a simple mixture model + * @brief Test hybrid elimination with a simple mixture model * @author Varun Agrawal * @author Frank Dellaert * @date September 2024 */ +#include #include +#include #include +#include +#include +#include #include +#include +#include // Include for test suite #include @@ -29,15 +36,15 @@ using symbol_shorthand::M; using symbol_shorthand::Z; // Define mode key and an assignment m==1 -static const DiscreteKey m(M(0), 2); -static const DiscreteValues m1Assignment{{M(0), 1}}; +const DiscreteKey m(M(0), 2); +const DiscreteValues m1Assignment{{M(0), 1}}; // Define a 50/50 prior on the mode DiscreteConditional::shared_ptr mixing = 
std::make_shared(m, "60/40"); // define Continuous keys -static const KeyVector continuousKeys{Z(0)}; +const KeyVector continuousKeys{Z(0)}; /** * Create a simple Gaussian Mixture Model represented as p(z|m)P(m) @@ -45,8 +52,8 @@ static const KeyVector continuousKeys{Z(0)}; * The "mode" m is binary and depending on m, we have 2 different means * μ1 and μ2 for the Gaussian density p(z|m). */ -static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, - double sigma0, double sigma1) { +HybridBayesNet GaussianMixtureModel(double mu0, double mu1, double sigma0, + double sigma1) { HybridBayesNet hbn; auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); @@ -70,37 +77,37 @@ double Gaussian(double mu, double sigma, double z) { * If sigma0 == sigma1, it simplifies to a sigmoid function. * Hardcodes 60/40 prior on mode. */ -static double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, - double z) { +double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, + double z) { const double p0 = 0.6 * Gaussian(mu0, sigma0, z); const double p1 = 0.4 * Gaussian(mu1, sigma1, z); return p1 / (p0 + p1); }; /// Given \phi(m;z)\phi(m) use eliminate to obtain P(m|z). -static DiscreteConditional solveHFG(const HybridGaussianFactorGraph &hfg) { +DiscreteConditional SolveHFG(const HybridGaussianFactorGraph &hfg) { return *hfg.eliminateSequential()->at(0)->asDiscrete(); } /// Given p(z,m) and z, convert to HFG and solve. -static DiscreteConditional solveHBN(const HybridBayesNet &hbn, double z) { +DiscreteConditional SolveHBN(const HybridBayesNet &hbn, double z) { VectorValues given{{Z(0), Vector1(z)}}; - return solveHFG(hbn.toFactorGraph(given)); + return SolveHFG(hbn.toFactorGraph(given)); } /* * Test a Gaussian Mixture Model P(m)p(z|m) with same sigma. * The posterior, as a function of z, should be a sigmoid function. 
*/ -TEST(HybridGaussianFactor, GaussianMixtureModel) { +TEST(GaussianMixture, GaussianMixtureModel) { double mu0 = 1.0, mu1 = 3.0; double sigma = 2.0; - auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma, sigma); + auto hbn = GaussianMixtureModel(mu0, mu1, sigma, sigma); // At the halfway point between the means, we should get P(m|z)=0.5 double midway = mu1 - mu0; - auto pMid = solveHBN(hbn, midway); + auto pMid = SolveHBN(hbn, midway); EXPECT(assert_equal(DiscreteConditional(m, "60/40"), pMid)); // Everywhere else, the result should be a sigmoid. @@ -109,7 +116,7 @@ TEST(HybridGaussianFactor, GaussianMixtureModel) { const double expected = prob_m_z(mu0, mu1, sigma, sigma, z); // Workflow 1: convert HBN to HFG and solve - auto posterior1 = solveHBN(hbn, z); + auto posterior1 = SolveHBN(hbn, z); EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8); // Workflow 2: directly specify HFG and solve @@ -117,7 +124,7 @@ TEST(HybridGaussianFactor, GaussianMixtureModel) { hfg1.emplace_shared( m, std::vector{Gaussian(mu0, sigma, z), Gaussian(mu1, sigma, z)}); hfg1.push_back(mixing); - auto posterior2 = solveHFG(hfg1); + auto posterior2 = SolveHFG(hfg1); EXPECT_DOUBLES_EQUAL(expected, posterior2(m1Assignment), 1e-8); } } @@ -126,16 +133,16 @@ TEST(HybridGaussianFactor, GaussianMixtureModel) { * Test a Gaussian Mixture Model P(m)p(z|m) with different sigmas. * The posterior, as a function of z, should be a unimodal function. */ -TEST(HybridGaussianFactor, GaussianMixtureModel2) { +TEST(GaussianMixture, GaussianMixtureModel2) { double mu0 = 1.0, mu1 = 3.0; double sigma0 = 8.0, sigma1 = 4.0; - auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma0, sigma1); + auto hbn = GaussianMixtureModel(mu0, mu1, sigma0, sigma1); // We get zMax=3.1333 by finding the maximum value of the function, at which // point the mode m==1 is about twice as probable as m==0. 
double zMax = 3.133; - auto pMax = solveHBN(hbn, zMax); + auto pMax = SolveHBN(hbn, zMax); EXPECT(assert_equal(DiscreteConditional(m, "42/58"), pMax, 1e-4)); // Everywhere else, the result should be a bell curve like function. @@ -144,7 +151,7 @@ TEST(HybridGaussianFactor, GaussianMixtureModel2) { const double expected = prob_m_z(mu0, mu1, sigma0, sigma1, z); // Workflow 1: convert HBN to HFG and solve - auto posterior1 = solveHBN(hbn, z); + auto posterior1 = SolveHBN(hbn, z); EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8); // Workflow 2: directly specify HFG and solve @@ -152,7 +159,7 @@ TEST(HybridGaussianFactor, GaussianMixtureModel2) { hfg.emplace_shared( m, std::vector{Gaussian(mu0, sigma0, z), Gaussian(mu1, sigma1, z)}); hfg.push_back(mixing); - auto posterior2 = solveHFG(hfg); + auto posterior2 = SolveHFG(hfg); EXPECT_DOUBLES_EQUAL(expected, posterior2(m1Assignment), 1e-8); } } From e023360843362eb8df83c786835c0207737501e4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 26 Sep 2024 11:50:58 -0400 Subject: [PATCH 335/398] Revert "Revert "Use pybind11-stubgen for generating stubs"" This reverts commit 92c7f51db53abeb27755e46fa0bd9a04013fb5d0. 
--- python/CMakeLists.txt | 53 ++++++++++++++++++++++++++----------- python/dev_requirements.txt | 2 +- 2 files changed, 38 insertions(+), 17 deletions(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 43f8d78bd..404f5ecc0 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -171,6 +171,7 @@ file(COPY "${GTSAM_SOURCE_DIR}/examples/Data" DESTINATION "${GTSAM_MODULE_PATH}" # Add gtsam as a dependency to the install target set(GTSAM_PYTHON_DEPENDENCIES ${GTSAM_PYTHON_TARGET}) +set(GTSAM_PYTHON_INSTALL_EXTRA "") if(GTSAM_UNSTABLE_BUILD_PYTHON) set(ignore @@ -250,6 +251,22 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) VERBATIM ) endif() + + add_custom_target( + python-unstable-stubs + COMMAND + ${CMAKE_COMMAND} -E env + "PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}" + pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" gtsam_unstable + DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_UNSTABLE_TARGET} + WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/" + ) + + if(NOT WIN32) + # Add the stubgen target as a dependency to the install target + list(APPEND GTSAM_PYTHON_INSTALL_EXTRA python-unstable-stubs) + endif() + # Custom make command to run all GTSAM_UNSTABLE Python tests add_custom_target( python-test-unstable @@ -262,26 +279,30 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) ) endif() +add_custom_target( + python-stubs + COMMAND + ${CMAKE_COMMAND} -E env + "PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}" + pybind11-stubgen -o . 
--enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" gtsam + DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_TARGET} + WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/" +) + +if(NOT WIN32) + # Add the stubgen target as a dependency to the install target + list(APPEND GTSAM_PYTHON_INSTALL_EXTRA python-stubs) +endif() + # Add custom target so we can install with `make python-install` # Note below we make sure to install with --user iff not in a virtualenv set(GTSAM_PYTHON_INSTALL_TARGET python-install) -#TODO(Varun) Maybe move the long command to script? -# https://stackoverflow.com/questions/49053544/how-do-i-run-a-python-script-every-time-in-a-cmake-build -if (NOT WIN32) # WIN32=1 is target platform is Windows - add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} - COMMAND stubgen -q -p gtsam && cp -a out/gtsam/ gtsam && ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])" - DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} - WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY} - VERBATIM) -else() - #TODO(Varun) Find equivalent cp on Windows - add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} - COMMAND ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])" - DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} - WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY} - VERBATIM) -endif() +add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET} + COMMAND ${PYTHON_EXECUTABLE} -c "import sys, 
subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])" + DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_INSTALL_EXTRA} + WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY} + VERBATIM) # Custom make command to run all GTSAM Python tests add_custom_target( diff --git a/python/dev_requirements.txt b/python/dev_requirements.txt index 6a5e7f924..94ab40189 100644 --- a/python/dev_requirements.txt +++ b/python/dev_requirements.txt @@ -1,3 +1,3 @@ -r requirements.txt pyparsing>=2.4.2 -mypy==1.4.1 #TODO(Varun) A bug in mypy>=1.5.0 causes an unresolved placeholder error when importing numpy>=2.0.0 (https://github.com/python/mypy/issues/17396) \ No newline at end of file +pybind11-stubgen>=2.5.1 \ No newline at end of file From e36fa332f2752ae14f825831721d7037fd49f7bc Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 26 Sep 2024 11:51:41 -0400 Subject: [PATCH 336/398] Revert "Revert "Fix compile error on Clang 18, tweak for better type generation"" This reverts commit 6190e3d13ffec5b7435fd9e3d62a2a94a9d6fb9f. --- python/CMakeLists.txt | 4 ++-- python/gtsam/gtsam.tpl | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 404f5ecc0..674f17d3f 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -257,7 +257,7 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}" - pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" gtsam_unstable + pybind11-stubgen -o . 
--enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" --numpy-array-use-type-var gtsam_unstable DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_UNSTABLE_TARGET} WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/" ) @@ -284,7 +284,7 @@ add_custom_target( COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}" - pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" gtsam + pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" --numpy-array-use-type-var gtsam DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_TARGET} WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/" ) diff --git a/python/gtsam/gtsam.tpl b/python/gtsam/gtsam.tpl index c72a216a2..6876b4ab4 100644 --- a/python/gtsam/gtsam.tpl +++ b/python/gtsam/gtsam.tpl @@ -39,12 +39,12 @@ namespace py = pybind11; {module_def} {{ m_.doc() = "pybind11 wrapper of {module_name}"; +// Specializations for STL classes +#include "python/gtsam/specializations/{module_name}.h" + {submodules_init} {wrapped_namespace} -// Specializations for STL classes -#include "python/gtsam/specializations/{module_name}.h" - }} From 580dcb27f4cca11fd0708783d5a1b5e7b78141d0 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 26 Sep 2024 12:12:26 -0400 Subject: [PATCH 337/398] ignore errors in stub generation --- python/CMakeLists.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 674f17d3f..501218924 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -257,7 +257,7 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) COMMAND ${CMAKE_COMMAND} -E env 
"PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}" - pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" --numpy-array-use-type-var gtsam_unstable + pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" --numpy-array-use-type-var --ignore-all-errors gtsam_unstable DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_UNSTABLE_TARGET} WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/" ) @@ -284,7 +284,7 @@ add_custom_target( COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}" - pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" --numpy-array-use-type-var gtsam + pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" --numpy-array-use-type-var --ignore-all-errors gtsam DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_TARGET} WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/" ) From 1f11b5472f957de00e614242fbc3e00c578125c3 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 10:41:04 -0700 Subject: [PATCH 338/398] Initial attempt at removing extra arguments --- gtsam/hybrid/HybridGaussianConditional.cpp | 85 ++++++++++++---------- gtsam/hybrid/HybridGaussianConditional.h | 9 +-- gtsam/hybrid/HybridGaussianFactor.h | 2 +- 3 files changed, 49 insertions(+), 47 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index ef38237f2..00ead068a 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -28,56 +28,65 @@ #include namespace gtsam { 
-HybridGaussianFactor::FactorValuePairs GetFactorValuePairs( - const HybridGaussianConditional::Conditionals &conditionals) { - auto func = [](const GaussianConditional::shared_ptr &conditional) - -> GaussianFactorValuePair { - double value = 0.0; - // Check if conditional is pruned - if (conditional) { - // Assign log(\sqrt(|2πΣ|)) = -log(1 / sqrt(|2πΣ|)) - value = conditional->negLogConstant(); - } - return {std::dynamic_pointer_cast(conditional), value}; - }; - return HybridGaussianFactor::FactorValuePairs(conditionals, func); -} - HybridGaussianConditional::HybridGaussianConditional( - const KeyVector &continuousFrontals, const KeyVector &continuousParents, const DiscreteKeys &discreteParents, const HybridGaussianConditional::Conditionals &conditionals) - : BaseFactor(CollectKeys(continuousFrontals, continuousParents), - discreteParents, GetFactorValuePairs(conditionals)), - BaseConditional(continuousFrontals.size()), - conditionals_(conditionals) { - // Calculate negLogConstant_ as the minimum of the negative-log normalizers of - // the conditionals, by visiting the decision tree: + : BaseConditional(0) { // Initialize with zero; we'll set it properly later + + // Check if conditionals are empty + if (conditionals.empty()) { + throw std::invalid_argument("Conditionals cannot be empty"); + } + + KeyVector frontals, parents; negLogConstant_ = std::numeric_limits::infinity(); - conditionals_.visit( - [this](const GaussianConditional::shared_ptr &conditional) { - if (conditional) { - this->negLogConstant_ = - std::min(this->negLogConstant_, conditional->negLogConstant()); - } - }); + auto func = + [&](const GaussianConditional::shared_ptr &c) -> GaussianFactorValuePair { + double value = 0.0; + // Check if conditional is pruned + if (c) { + KeyVector cf(c->frontals().begin(), c->frontals().end()); + KeyVector cp(c->parents().begin(), c->parents().end()); + if (frontals.empty()) { + // Get frontal/parent keys from first conditional. 
+ frontals = cf; + parents = cp; + } else if (cf != frontals || cp != parents) { + throw std::invalid_argument( + "All conditionals must have the same frontals and parents"); + } + // Assign log(\sqrt(|2πΣ|)) = -log(1 / sqrt(|2πΣ|)) + value = c->negLogConstant(); + this->negLogConstant_ = std::min(this->negLogConstant_, value); + } + return {std::dynamic_pointer_cast(c), value}; + }; + BaseFactor::factors_ = HybridGaussianFactor::Factors(conditionals, func); + + // Initialize base classes + KeyVector continuousKeys = frontals; + continuousKeys.insert(continuousKeys.end(), parents.begin(), parents.end()); + BaseFactor::keys_ = continuousKeys; + BaseFactor::discreteKeys_ = discreteParents; + BaseConditional::nrFrontals_ = frontals.size(); + + // Assign conditionals + conditionals_ = conditionals; // TODO(frank): a duplicate of factors_ !!! } +/* *******************************************************************************/ +HybridGaussianConditional::HybridGaussianConditional( + const DiscreteKey &discreteParent, + const std::vector &conditionals) + : HybridGaussianConditional(DiscreteKeys{discreteParent}, + Conditionals({discreteParent}, conditionals)) {} + /* *******************************************************************************/ const HybridGaussianConditional::Conditionals & HybridGaussianConditional::conditionals() const { return conditionals_; } -/* *******************************************************************************/ -HybridGaussianConditional::HybridGaussianConditional( - const KeyVector &continuousFrontals, const KeyVector &continuousParents, - const DiscreteKey &discreteParent, - const std::vector &conditionals) - : HybridGaussianConditional(continuousFrontals, continuousParents, - DiscreteKeys{discreteParent}, - Conditionals({discreteParent}, conditionals)) {} - /* *******************************************************************************/ GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree() const { diff 
--git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 9c70cf6cb..6198f65fb 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -95,17 +95,13 @@ class GTSAM_EXPORT HybridGaussianConditional /** * @brief Construct a new HybridGaussianConditional object. * - * @param continuousFrontals the continuous frontals. - * @param continuousParents the continuous parents. * @param discreteParents the discrete parents. Will be placed last. * @param conditionals a decision tree of GaussianConditionals. The number of * conditionals should be C^(number of discrete parents), where C is the * cardinality of the DiscreteKeys in discreteParents, since the * discreteParents will be used as the labels in the decision tree. */ - HybridGaussianConditional(const KeyVector &continuousFrontals, - const KeyVector &continuousParents, - const DiscreteKeys &discreteParents, + HybridGaussianConditional(const DiscreteKeys &discreteParents, const Conditionals &conditionals); /** @@ -113,14 +109,11 @@ class GTSAM_EXPORT HybridGaussianConditional * a vector of Gaussian conditionals. * The DecisionTree-based constructor is preferred over this one. * - * @param continuousFrontals The continuous frontal variables - * @param continuousParents The continuous parent variables * @param discreteParent Single discrete parent variable * @param conditionals Vector of conditionals with the same size as the * cardinality of the discrete parent. 
*/ HybridGaussianConditional( - const KeyVector &continuousFrontals, const KeyVector &continuousParents, const DiscreteKey &discreteParent, const std::vector &conditionals); diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 817e54e56..1aa6a0263 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -69,7 +69,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// typedef for Decision Tree of Gaussian factors. using Factors = DecisionTree; - private: + protected: /// Decision tree of Gaussian factors indexed by discrete keys. Factors factors_; From 977112d004dc0a5bf34de17475373ddb46fd1791 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 11:28:41 -0700 Subject: [PATCH 339/398] Avoid double traversal --- gtsam/hybrid/HybridFactor.h | 4 +--- gtsam/hybrid/HybridGaussianConditional.cpp | 21 +++++++++++++-------- gtsam/hybrid/HybridGaussianFactor.cpp | 8 ++++---- gtsam/hybrid/HybridGaussianFactor.h | 3 +++ 4 files changed, 21 insertions(+), 15 deletions(-) diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index fc91e0838..1e99c1365 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -56,11 +56,9 @@ class GTSAM_EXPORT HybridFactor : public Factor { /// Enum to help with categorizing hybrid factors. enum class Category { None, Discrete, Continuous, Hybrid }; - private: + protected: /// Record what category of HybridFactor this is. Category category_ = Category::None; - - protected: // Set of DiscreteKeys for this factor. 
DiscreteKeys discreteKeys_; /// Record continuous keys for book-keeping diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 00ead068a..903542c24 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -61,17 +61,22 @@ HybridGaussianConditional::HybridGaussianConditional( } return {std::dynamic_pointer_cast(c), value}; }; - BaseFactor::factors_ = HybridGaussianFactor::Factors(conditionals, func); + HybridGaussianFactor::FactorValuePairs pairs(conditionals, func); - // Initialize base classes - KeyVector continuousKeys = frontals; - continuousKeys.insert(continuousKeys.end(), parents.begin(), parents.end()); - BaseFactor::keys_ = continuousKeys; - BaseFactor::discreteKeys_ = discreteParents; + // Adjust frontals size BaseConditional::nrFrontals_ = frontals.size(); - // Assign conditionals - conditionals_ = conditionals; // TODO(frank): a duplicate of factors_ !!! + // Initialize HybridFactor + HybridFactor::category_ = HybridFactor::Category::Hybrid; + HybridFactor::discreteKeys_ = discreteParents; + HybridFactor::keys_ = frontals; + keys_.insert(keys_.end(), parents.begin(), parents.end()); + + // Initialize BaseFactor + BaseFactor::factors_ = BaseFactor::augment(pairs); // TODO(frank): expensive + + // Assign local conditionals. TODO(frank): a duplicate of factors_ !!! + conditionals_ = conditionals; } /* *******************************************************************************/ diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index d5773590b..af2d0fde5 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -38,11 +38,11 @@ namespace gtsam { * Gaussian factor in factors. 
* @return HybridGaussianFactor::Factors */ -static HybridGaussianFactor::Factors augment( - const HybridGaussianFactor::FactorValuePairs &factors) { +HybridGaussianFactor::Factors HybridGaussianFactor::augment( + const FactorValuePairs &factors) { // Find the minimum value so we can "proselytize" to positive values. // Done because we can't have sqrt of negative numbers. - HybridGaussianFactor::Factors gaussianFactors; + Factors gaussianFactors; AlgebraicDecisionTree valueTree; std::tie(gaussianFactors, valueTree) = unzip(factors); @@ -73,7 +73,7 @@ static HybridGaussianFactor::Factors augment( return std::dynamic_pointer_cast( std::make_shared(gfg)); }; - return HybridGaussianFactor::Factors(factors, update); + return Factors(factors, update); } /* *******************************************************************************/ diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 1aa6a0263..9aa505092 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -73,6 +73,9 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// Decision tree of Gaussian factors indexed by discrete keys. Factors factors_; + /// Helper function to "hide" the constants in the Jacobian factors. + static Factors augment(const FactorValuePairs &factors); + /** * @brief Helper function to return factors and functional to create a * DecisionTree of Gaussian Factor Graphs. 
From b45ba003cad605bd157f98b8fa88a9033aac44e7 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 11:29:03 -0700 Subject: [PATCH 340/398] Simplify all call sites --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 2 +- gtsam/hybrid/tests/TinyHybridExample.h | 3 +- gtsam/hybrid/tests/testGaussianMixture.cpp | 6 +-- gtsam/hybrid/tests/testHybridBayesNet.cpp | 6 +-- gtsam/hybrid/tests/testHybridConditional.cpp | 2 + gtsam/hybrid/tests/testHybridEstimation.cpp | 10 +--- .../tests/testHybridGaussianConditional.cpp | 6 +-- .../hybrid/tests/testHybridGaussianFactor.cpp | 7 ++- .../tests/testHybridGaussianFactorGraph.cpp | 46 +++++-------------- .../hybrid/tests/testSerializationHybrid.cpp | 4 +- 10 files changed, 27 insertions(+), 65 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 82828bb41..540ee446b 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -414,7 +414,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, HybridGaussianConditional::Conditionals conditionals( eliminationResults, [](const Result &pair) { return pair.first; }); auto hybridGaussian = std::make_shared( - frontalKeys, continuousSeparator, discreteSeparator, conditionals); + discreteSeparator, conditionals); return {std::make_shared(hybridGaussian), newFactor}; } diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index 09905bf79..f2ff7dde2 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -46,8 +46,7 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1, std::vector conditionals{ GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 0.5), GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 3)}; - bayesNet.emplace_shared( - KeyVector{Z(i)}, KeyVector{X(0)}, mode_i, conditionals); + bayesNet.emplace_shared(mode_i, 
conditionals); } // Create prior on X(0). diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index be4ba2ff4..cde7e4063 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -43,9 +43,6 @@ const DiscreteValues m1Assignment{{M(0), 1}}; DiscreteConditional::shared_ptr mixing = std::make_shared(m, "60/40"); -// define Continuous keys -const KeyVector continuousKeys{Z(0)}; - /** * Create a simple Gaussian Mixture Model represented as p(z|m)P(m) * where m is a discrete variable and z is a continuous variable. @@ -61,8 +58,7 @@ HybridBayesNet GaussianMixtureModel(double mu0, double mu1, double sigma0, model0), c1 = std::make_shared(Z(0), Vector1(mu1), I_1x1, model1); - hbn.emplace_shared(continuousKeys, KeyVector{}, m, - std::vector{c0, c1}); + hbn.emplace_shared(m, std::vector{c0, c1}); hbn.push_back(mixing); return hbn; } diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 6e23db91e..b049b03fd 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -108,8 +108,7 @@ TEST(HybridBayesNet, evaluateHybrid) { HybridBayesNet bayesNet; bayesNet.push_back(continuousConditional); bayesNet.emplace_shared( - KeyVector{X(1)}, KeyVector{}, Asia, - std::vector{conditional0, conditional1}); + Asia, std::vector{conditional0, conditional1}); bayesNet.emplace_shared(Asia, "99/1"); // Create values at which to evaluate. @@ -169,8 +168,7 @@ TEST(HybridBayesNet, Error) { X(1), Vector1::Constant(2), I_1x1, model1); auto gm = std::make_shared( - KeyVector{X(1)}, KeyVector{}, Asia, - std::vector{conditional0, conditional1}); + Asia, std::vector{conditional0, conditional1}); // Create hybrid Bayes net. 
HybridBayesNet bayesNet; bayesNet.push_back(continuousConditional); diff --git a/gtsam/hybrid/tests/testHybridConditional.cpp b/gtsam/hybrid/tests/testHybridConditional.cpp index 365dde3bc..106ae28e5 100644 --- a/gtsam/hybrid/tests/testHybridConditional.cpp +++ b/gtsam/hybrid/tests/testHybridConditional.cpp @@ -39,6 +39,8 @@ TEST(HybridConditional, Invariants) { const DiscreteValues d{{M(0), 1}}; const HybridValues values{c, d}; + GTSAM_PRINT(bn); + // Check invariants for p(z|x,m) auto hc0 = bn.at(0); CHECK(hc0->isHybrid()); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 4899205df..6c69607a4 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -623,10 +623,7 @@ TEST(HybridEstimation, ModeSelection) { Z_1x1, noise_loose), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_tight)}; - bn.emplace_shared( - KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode}, - HybridGaussianConditional::Conditionals(DiscreteKeys{mode}, - conditionals)); + bn.emplace_shared(mode, conditionals); VectorValues vv; vv.insert(Z(0), Z_1x1); @@ -658,10 +655,7 @@ TEST(HybridEstimation, ModeSelection2) { Z_3x1, noise_loose), GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_tight)}; - bn.emplace_shared( - KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode}, - HybridGaussianConditional::Conditionals(DiscreteKeys{mode}, - conditionals)); + bn.emplace_shared(mode, conditionals); VectorValues vv; vv.insert(Z(0), Z_3x1); diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index adcf6c70f..02163df9e 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -52,8 +52,7 @@ const std::vector conditionals{ commonSigma), GaussianConditional::sharedMeanAndStddev(Z(0), 
I_1x1, X(0), Vector1(0.0), commonSigma)}; -const HybridGaussianConditional hybrid_conditional({Z(0)}, {X(0)}, mode, - conditionals); +const HybridGaussianConditional hybrid_conditional(mode, conditionals); } // namespace equal_constants /* ************************************************************************* */ @@ -158,8 +157,7 @@ const std::vector conditionals{ 0.5), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 3.0)}; -const HybridGaussianConditional hybrid_conditional({Z(0)}, {X(0)}, mode, - conditionals); +const HybridGaussianConditional hybrid_conditional(mode, conditionals); } // namespace mode_dependent_constants /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 55c3bccfb..5df9f4811 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -169,7 +169,7 @@ TEST(HybridGaussianFactor, HybridGaussianConditional) { auto gaussians = std::make_shared(); HybridGaussianConditional::Conditionals conditionals(gaussians); - HybridGaussianConditional gm({}, keys, dKeys, conditionals); + HybridGaussianConditional gm(dKeys, conditionals); EXPECT_LONGS_EQUAL(2, gm.discreteKeys().size()); } @@ -234,9 +234,8 @@ static HybridGaussianConditional::shared_ptr CreateHybridMotionModel( -I_1x1, model1); DiscreteKeys discreteParents{m1}; return std::make_shared( - KeyVector{X(1)}, KeyVector{X(0)}, discreteParents, - HybridGaussianConditional::Conditionals(discreteParents, - std::vector{c0, c1})); + discreteParents, HybridGaussianConditional::Conditionals( + discreteParents, std::vector{c0, c1})); } /// Create two state Bayes network with 1 or two measurement models diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index b1c68adf3..6e817d312 100644 --- 
a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -72,13 +72,10 @@ TEST(HybridGaussianFactorGraph, Creation) { // Define a hybrid gaussian conditional P(x0|x1, c0) // and add it to the factor graph. HybridGaussianConditional gm( - {X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}), - HybridGaussianConditional::Conditionals( - M(0), - std::make_shared(X(0), Z_3x1, I_3x3, X(1), - I_3x3), - std::make_shared(X(0), Vector3::Ones(), I_3x3, - X(1), I_3x3))); + {M(0), 2}, + {std::make_shared(X(0), Z_3x1, I_3x3, X(1), I_3x3), + std::make_shared(X(0), Vector3::Ones(), I_3x3, X(1), + I_3x3)}); hfg.add(gm); EXPECT_LONGS_EQUAL(2, hfg.size()); @@ -654,11 +651,7 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) { x0, -I_1x1, model0), c1 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, x0, -I_1x1, model1); - DiscreteKeys discreteParents{m1}; - hbn.emplace_shared( - KeyVector{f01}, KeyVector{x0, x1}, discreteParents, - HybridGaussianConditional::Conditionals(discreteParents, - std::vector{c0, c1})); + hbn.emplace_shared(m1, std::vector{c0, c1}); // Discrete uniform prior. hbn.emplace_shared(m1, "0.5/0.5"); @@ -830,11 +823,8 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { X(0), Vector1(14.1421), I_1x1 * 2.82843), conditional1 = std::make_shared( X(0), Vector1(10.1379), I_1x1 * 2.02759); - DiscreteKeys discreteParents{mode}; expectedBayesNet.emplace_shared( - KeyVector{X(0)}, KeyVector{}, discreteParents, - HybridGaussianConditional::Conditionals( - discreteParents, std::vector{conditional0, conditional1})); + mode, std::vector{conditional0, conditional1}); // Add prior on mode. 
expectedBayesNet.emplace_shared(mode, "74/26"); @@ -860,10 +850,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { std::vector conditionals{ GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 0.5)}; - auto gm = std::make_shared( - KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode}, - HybridGaussianConditional::Conditionals(DiscreteKeys{mode}, - conditionals)); + auto gm = std::make_shared(mode, conditionals); bn.push_back(gm); // Create prior on X(0). @@ -891,9 +878,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { conditional1 = std::make_shared( X(0), Vector1(14.1421), I_1x1 * 2.82843); expectedBayesNet.emplace_shared( - KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, - HybridGaussianConditional::Conditionals( - DiscreteKeys{mode}, std::vector{conditional0, conditional1})); + mode, std::vector{conditional0, conditional1}); // Add prior on mode. expectedBayesNet.emplace_shared(mode, "1/1"); @@ -929,9 +914,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) { conditional1 = std::make_shared( X(0), Vector1(10.274), I_1x1 * 2.0548); expectedBayesNet.emplace_shared( - KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, - HybridGaussianConditional::Conditionals( - DiscreteKeys{mode}, std::vector{conditional0, conditional1})); + mode, std::vector{conditional0, conditional1}); // Add prior on mode. 
expectedBayesNet.emplace_shared(mode, "23/77"); @@ -980,10 +963,7 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1, 0.5), GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1, 3.0)}; - bn.emplace_shared( - KeyVector{Z(t)}, KeyVector{X(t)}, DiscreteKeys{noise_mode_t}, - HybridGaussianConditional::Conditionals(DiscreteKeys{noise_mode_t}, - conditionals)); + bn.emplace_shared(noise_mode_t, conditionals); // Create prior on discrete mode N(t): bn.emplace_shared(noise_mode_t, "20/80"); @@ -999,10 +979,8 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { 0.2), GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), I_1x1, 0.2)}; - auto gm = std::make_shared( - KeyVector{X(t)}, KeyVector{X(t - 1)}, DiscreteKeys{motion_model_t}, - HybridGaussianConditional::Conditionals(DiscreteKeys{motion_model_t}, - conditionals)); + auto gm = std::make_shared(motion_model_t, + conditionals); bn.push_back(gm); // Create prior on motion model M(t): diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 4bf90019b..6a672a3d2 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -115,9 +115,7 @@ TEST(HybridSerialization, HybridGaussianConditional) { GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 0.5)); const auto conditional1 = std::make_shared( GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3)); - const HybridGaussianConditional gm({Z(0)}, {X(0)}, {mode}, - HybridGaussianConditional::Conditionals( - {mode}, {conditional0, conditional1})); + const HybridGaussianConditional gm(mode, {conditional0, conditional1}); EXPECT(equalsObj(gm)); EXPECT(equalsXML(gm)); From ebebf7ddd547ef968fec90e42de09efa3c6de634 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 11:43:05 -0700 Subject: [PATCH 341/398] Fix 
initialization issue with helper class --- gtsam/hybrid/HybridGaussianConditional.cpp | 46 ++++++++-------------- gtsam/hybrid/HybridGaussianConditional.h | 20 ++++++++++ 2 files changed, 36 insertions(+), 30 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 903542c24..07c80f58c 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -28,57 +28,43 @@ #include namespace gtsam { -HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKeys &discreteParents, - const HybridGaussianConditional::Conditionals &conditionals) - : BaseConditional(0) { // Initialize with zero; we'll set it properly later +/* *******************************************************************************/ +HybridGaussianConditional::ConstructorHelper::ConstructorHelper( + const HybridGaussianConditional::Conditionals &conditionals) { + negLogConstant = std::numeric_limits::infinity(); - // Check if conditionals are empty - if (conditionals.empty()) { - throw std::invalid_argument("Conditionals cannot be empty"); - } - - KeyVector frontals, parents; - negLogConstant_ = std::numeric_limits::infinity(); auto func = [&](const GaussianConditional::shared_ptr &c) -> GaussianFactorValuePair { double value = 0.0; - // Check if conditional is pruned if (c) { KeyVector cf(c->frontals().begin(), c->frontals().end()); KeyVector cp(c->parents().begin(), c->parents().end()); if (frontals.empty()) { - // Get frontal/parent keys from first conditional. 
frontals = cf; parents = cp; } else if (cf != frontals || cp != parents) { throw std::invalid_argument( "All conditionals must have the same frontals and parents"); } - // Assign log(\sqrt(|2πΣ|)) = -log(1 / sqrt(|2πΣ|)) value = c->negLogConstant(); - this->negLogConstant_ = std::min(this->negLogConstant_, value); + negLogConstant = std::min(negLogConstant, value); } return {std::dynamic_pointer_cast(c), value}; }; - HybridGaussianFactor::FactorValuePairs pairs(conditionals, func); + pairs = HybridGaussianFactor::FactorValuePairs(conditionals, func); - // Adjust frontals size - BaseConditional::nrFrontals_ = frontals.size(); - - // Initialize HybridFactor - HybridFactor::category_ = HybridFactor::Category::Hybrid; - HybridFactor::discreteKeys_ = discreteParents; - HybridFactor::keys_ = frontals; - keys_.insert(keys_.end(), parents.begin(), parents.end()); - - // Initialize BaseFactor - BaseFactor::factors_ = BaseFactor::augment(pairs); // TODO(frank): expensive - - // Assign local conditionals. TODO(frank): a duplicate of factors_ !!! 
- conditionals_ = conditionals; + // Build continuousKeys + continuousKeys = frontals; + continuousKeys.insert(continuousKeys.end(), parents.begin(), parents.end()); } +/* *******************************************************************************/ +HybridGaussianConditional::HybridGaussianConditional( + const DiscreteKeys &discreteParents, + const HybridGaussianConditional::Conditionals &conditionals) + : HybridGaussianConditional(discreteParents, conditionals, + ConstructorHelper(conditionals)) {} + /* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( const DiscreteKey &discreteParent, diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 6198f65fb..c857ec162 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -68,6 +68,26 @@ class GTSAM_EXPORT HybridGaussianConditional ///< Take advantage of the neg-log space so everything is a minimization double negLogConstant_; + /// Helper struct for private constructor. + struct ConstructorHelper { + KeyVector frontals; + KeyVector parents; + KeyVector continuousKeys; + HybridGaussianFactor::FactorValuePairs pairs; + double negLogConstant; + ConstructorHelper(const Conditionals &conditionals); + }; + + /// Private constructor + HybridGaussianConditional( + const DiscreteKeys &discreteParents, + const HybridGaussianConditional::Conditionals &conditionals, + const ConstructorHelper &helper) + : BaseFactor(helper.continuousKeys, discreteParents, helper.pairs), + BaseConditional(helper.frontals.size()), + conditionals_(conditionals), + negLogConstant_(helper.negLogConstant) {} + /** * @brief Convert a HybridGaussianConditional of conditionals into * a DecisionTree of Gaussian factor graphs. 
From e04f0afd0e76a4ccb335a09ae9ea9ec2b2f7a7b4 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 11:45:09 -0700 Subject: [PATCH 342/398] De-clutter header --- gtsam/hybrid/HybridGaussianConditional.h | 65 +++++++++----------- gtsam/hybrid/tests/testHybridConditional.cpp | 2 - 2 files changed, 28 insertions(+), 39 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index c857ec162..ff9391d67 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -64,47 +64,11 @@ class GTSAM_EXPORT HybridGaussianConditional private: Conditionals conditionals_; ///< a decision tree of Gaussian conditionals. + ///< Negative-log of the normalization constant (log(\sqrt(|2πΣ|))). ///< Take advantage of the neg-log space so everything is a minimization double negLogConstant_; - /// Helper struct for private constructor. - struct ConstructorHelper { - KeyVector frontals; - KeyVector parents; - KeyVector continuousKeys; - HybridGaussianFactor::FactorValuePairs pairs; - double negLogConstant; - ConstructorHelper(const Conditionals &conditionals); - }; - - /// Private constructor - HybridGaussianConditional( - const DiscreteKeys &discreteParents, - const HybridGaussianConditional::Conditionals &conditionals, - const ConstructorHelper &helper) - : BaseFactor(helper.continuousKeys, discreteParents, helper.pairs), - BaseConditional(helper.frontals.size()), - conditionals_(conditionals), - negLogConstant_(helper.negLogConstant) {} - - /** - * @brief Convert a HybridGaussianConditional of conditionals into - * a DecisionTree of Gaussian factor graphs. - */ - GaussianFactorGraphTree asGaussianFactorGraphTree() const; - - /** - * @brief Helper function to get the pruner functor. - * - * @param discreteProbs The pruned discrete probabilities. 
- * @return std::function &, const GaussianConditional::shared_ptr &)> - */ - std::function &, const GaussianConditional::shared_ptr &)> - prunerFunc(const DecisionTreeFactor &discreteProbs); - public: /// @name Constructors /// @{ @@ -220,6 +184,33 @@ class GTSAM_EXPORT HybridGaussianConditional /// @} private: + /// Helper struct for private constructor. + struct ConstructorHelper { + KeyVector frontals, parents, continuousKeys; + HybridGaussianFactor::FactorValuePairs pairs; + double negLogConstant; + /// Compute all variables needed for the private constructor below. + ConstructorHelper(const Conditionals &conditionals); + }; + + /// Private constructor that uses helper struct above. + HybridGaussianConditional( + const DiscreteKeys &discreteParents, + const HybridGaussianConditional::Conditionals &conditionals, + const ConstructorHelper &helper) + : BaseFactor(helper.continuousKeys, discreteParents, helper.pairs), + BaseConditional(helper.frontals.size()), + conditionals_(conditionals), + negLogConstant_(helper.negLogConstant) {} + + /// Convert to a DecisionTree of Gaussian factor graphs. + GaussianFactorGraphTree asGaussianFactorGraphTree() const; + + //// Get the pruner functor from pruned discrete probabilities. + std::function &, const GaussianConditional::shared_ptr &)> + prunerFunc(const DecisionTreeFactor &prunedProbabilities); + /// Check whether `given` has values for all frontal keys. 
bool allFrontalsGiven(const VectorValues &given) const; diff --git a/gtsam/hybrid/tests/testHybridConditional.cpp b/gtsam/hybrid/tests/testHybridConditional.cpp index 106ae28e5..365dde3bc 100644 --- a/gtsam/hybrid/tests/testHybridConditional.cpp +++ b/gtsam/hybrid/tests/testHybridConditional.cpp @@ -39,8 +39,6 @@ TEST(HybridConditional, Invariants) { const DiscreteValues d{{M(0), 1}}; const HybridValues values{c, d}; - GTSAM_PRINT(bn); - // Check invariants for p(z|x,m) auto hc0 = bn.at(0); CHECK(hc0->isHybrid()); From e8089dc7cb7320c0ef9dc92829581bf7b9aeca51 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 11:56:47 -0700 Subject: [PATCH 343/398] Made augment static --- gtsam/hybrid/HybridGaussianFactor.cpp | 6 ------ gtsam/hybrid/HybridGaussianFactor.h | 3 ++- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index af2d0fde5..3db6e7965 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -76,12 +76,6 @@ HybridGaussianFactor::Factors HybridGaussianFactor::augment( return Factors(factors, update); } -/* *******************************************************************************/ -HybridGaussianFactor::HybridGaussianFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const FactorValuePairs &factors) - : Base(continuousKeys, discreteKeys), factors_(augment(factors)) {} - /* *******************************************************************************/ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { const This *e = dynamic_cast(&lf); diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 9aa505092..8e36e6615 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -133,7 +133,8 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { */ HybridGaussianFactor(const KeyVector 
&continuousKeys, const DiscreteKeys &discreteKeys, - const FactorValuePairs &factors); + const FactorValuePairs &factors) + : Base(continuousKeys, discreteKeys), factors_(augment(factors)) {} /// @} /// @name Testable From 35dd42ed2329c264f77f6b9553ca2c9ecc1446e0 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 12:53:50 -0700 Subject: [PATCH 344/398] Three constructor variants extract keys --- gtsam/hybrid/HybridFactor.h | 4 + gtsam/hybrid/HybridGaussianFactor.cpp | 115 +++++++++++++++++++++++--- gtsam/hybrid/HybridGaussianFactor.h | 55 ++++++------ 3 files changed, 134 insertions(+), 40 deletions(-) diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index 1e99c1365..39a72eb26 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -140,6 +140,10 @@ class GTSAM_EXPORT HybridFactor : public Factor { /// @} + protected: + /// protected constructor to initialize the category + HybridFactor(Category category) : category_(category) {} + private: #ifdef GTSAM_ENABLE_BOOST_SERIALIZATION /** Serialization function */ diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 3db6e7965..952bb7017 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -26,18 +26,11 @@ #include #include +#include "gtsam/hybrid/HybridFactor.h" + namespace gtsam { -/** - * @brief Helper function to augment the [A|b] matrices in the factor components - * with the additional scalar values. - * This is done by storing the value in - * the `b` vector as an additional row. - * - * @param factors DecisionTree of GaussianFactors and arbitrary scalars. - * Gaussian factor in factors. 
- * @return HybridGaussianFactor::Factors - */ +/* *******************************************************************************/ HybridGaussianFactor::Factors HybridGaussianFactor::augment( const FactorValuePairs &factors) { // Find the minimum value so we can "proselytize" to positive values. @@ -76,13 +69,111 @@ HybridGaussianFactor::Factors HybridGaussianFactor::augment( return Factors(factors, update); } +/* *******************************************************************************/ +HybridGaussianFactor::HybridGaussianFactor( + const DiscreteKey &discreteKey, + const std::vector &factors) + : Base(HybridFactor::Category::Hybrid) { + // Extract continuous keys from first-null factor and verify all others + KeyVector continuousKeys; + for (const auto &factor : factors) { + if (!factor) continue; + if (continuousKeys.empty()) { + continuousKeys = factor->keys(); + } else if (factor->keys() != continuousKeys) { + throw std::invalid_argument("All factors must have the same keys"); + } + } + + // Check that this worked. 
+ if (continuousKeys.empty()) { + throw std::invalid_argument("Need at least one non-null factor."); + } + + // Initialize the base class + Factor::keys_ = continuousKeys; + Factor::keys_.push_back(discreteKey.first); + Base::discreteKeys_ = {discreteKey}; + Base::continuousKeys_ = continuousKeys; + + // Build the DecisionTree from factor vector + factors_ = Factors({discreteKey}, factors); +} + +/* *******************************************************************************/ +HybridGaussianFactor::HybridGaussianFactor( + const DiscreteKey &discreteKey, + const std::vector &factorPairs) + : Base(HybridFactor::Category::Hybrid) { + // Extract continuous keys from first-null factor and verify all others + KeyVector continuousKeys; + for (const auto &pair : factorPairs) { + if (!pair.first) continue; + if (continuousKeys.empty()) { + continuousKeys = pair.first->keys(); + } else if (pair.first->keys() != continuousKeys) { + throw std::invalid_argument("All factors must have the same keys"); + } + } + + // Check that this worked. 
+ if (continuousKeys.empty()) { + throw std::invalid_argument("Need at least one non-null factor."); + } + + // Initialize the base class + Factor::keys_ = continuousKeys; + Factor::keys_.push_back(discreteKey.first); + Base::discreteKeys_ = {discreteKey}; + Base::continuousKeys_ = continuousKeys; + + // Build the FactorValuePairs DecisionTree + FactorValuePairs pairTree({discreteKey}, factorPairs); + + // Assign factors_ after calling augment + factors_ = augment(pairTree); +} + +/* *******************************************************************************/ +HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys &discreteKeys, + const FactorValuePairs &factorPairs) + : Base(HybridFactor::Category::Hybrid) { + // Verify that all factors have the same keys + KeyVector continuousKeys; + factorPairs.visit([&](const GaussianFactorValuePair &pair) { + if (pair.first) { + if (continuousKeys.empty()) { + continuousKeys = pair.first->keys(); + } else if (pair.first->keys() != continuousKeys) { + throw std::invalid_argument("All factors must have the same keys"); + } + } + }); + + // Check that this worked. 
+ if (continuousKeys.empty()) { + throw std::invalid_argument("Need at least one non-null factor."); + } + + // Initialize the base class + Factor::keys_ = continuousKeys; + for (const auto &discreteKey : discreteKeys) { + Factor::keys_.push_back(discreteKey.first); + } + Base::discreteKeys_ = discreteKeys; + Base::continuousKeys_ = continuousKeys; + + // Assign factors_ after calling augment + factors_ = augment(factorPairs); +} + /* *******************************************************************************/ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { const This *e = dynamic_cast(&lf); if (e == nullptr) return false; - // This will return false if either factors_ is empty or e->factors_ is empty, - // but not if both are empty or both are not empty: + // This will return false if either factors_ is empty or e->factors_ is + // empty, but not if both are empty or both are not empty: if (factors_.empty() ^ e->factors_.empty()) return false; // Check the base and the factors: diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 8e36e6615..0dc80ed36 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -73,17 +73,6 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// Decision tree of Gaussian factors indexed by discrete keys. Factors factors_; - /// Helper function to "hide" the constants in the Jacobian factors. - static Factors augment(const FactorValuePairs &factors); - - /** - * @brief Helper function to return factors and functional to create a - * DecisionTree of Gaussian Factor Graphs. - * - * @return GaussianFactorGraphTree - */ - GaussianFactorGraphTree asGaussianFactorGraphTree() const; - public: /// @name Constructors /// @{ @@ -96,14 +85,11 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * providing the factors for each mode m as a vector of factors ϕ_m(x). 
* The value ϕ(x,m) for the factor is simply ϕ_m(x). * - * @param continuousKeys Vector of keys for continuous factors. * @param discreteKey The discrete key for the "mode", indexing components. * @param factors Vector of gaussian factors, one for each mode. */ - HybridGaussianFactor(const KeyVector &continuousKeys, - const DiscreteKey &discreteKey, - const std::vector &factors) - : Base(continuousKeys, {discreteKey}), factors_({discreteKey}, factors) {} + HybridGaussianFactor(const DiscreteKey &discreteKey, + const std::vector &factors); /** * @brief Construct a new HybridGaussianFactor on a single discrete key, @@ -111,15 +97,11 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * provided as a vector of pairs (ϕ_m(x), E_m). * The value ϕ(x,m) for the factor is now ϕ_m(x) + E_m. * - * @param continuousKeys Vector of keys for continuous factors. * @param discreteKey The discrete key for the "mode", indexing components. - * @param factors Vector of gaussian factor-scalar pairs, one per mode. + * @param factorPairs Vector of gaussian factor-scalar pairs, one per mode. */ - HybridGaussianFactor(const KeyVector &continuousKeys, - const DiscreteKey &discreteKey, - const std::vector &factors) - : HybridGaussianFactor(continuousKeys, {discreteKey}, - FactorValuePairs({discreteKey}, factors)) {} + HybridGaussianFactor(const DiscreteKey &discreteKey, + const std::vector &factorPairs); /** * @brief Construct a new HybridGaussianFactor on a several discrete keys M, @@ -127,14 +109,11 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * scalars are provided as a DecisionTree of pairs (ϕ_M(x), E_M). * The value ϕ(x,M) for the factor is again ϕ_m(x) + E_m. * - * @param continuousKeys A vector of keys representing continuous variables. * @param discreteKeys Discrete variables and their cardinalities. * @param factors The decision tree of Gaussian factor/scalar pairs. 
*/ - HybridGaussianFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const FactorValuePairs &factors) - : Base(continuousKeys, discreteKeys), factors_(augment(factors)) {} + HybridGaussianFactor(const DiscreteKeys &discreteKeys, + const FactorValuePairs &factors); /// @} /// @name Testable @@ -189,7 +168,27 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { } /// @} + protected: + /** + * @brief Helper function to return factors and functional to create a + * DecisionTree of Gaussian Factor Graphs. + * + * @return GaussianFactorGraphTree + */ + GaussianFactorGraphTree asGaussianFactorGraphTree() const; + private: + /** + * @brief Helper function to augment the [A|b] matrices in the factor + * components with the additional scalar values. This is done by storing the + * value in the `b` vector as an additional row. + * + * @param factors DecisionTree of GaussianFactors and arbitrary scalars. + * Gaussian factor in factors. + * @return HybridGaussianFactor::Factors + */ + static Factors augment(const FactorValuePairs &factors); + /// Helper method to compute the error of a component. 
double potentiallyPrunedComponentError( const sharedFactor &gf, const VectorValues &continuousValues) const; From 08cf399bbf3e656ba8caf1c5f482d84318c9ee24 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 13:07:10 -0700 Subject: [PATCH 345/398] Fix all call sites --- gtsam/hybrid/HybridGaussianConditional.cpp | 8 ++---- gtsam/hybrid/HybridGaussianConditional.h | 4 +-- gtsam/hybrid/HybridGaussianFactor.cpp | 15 ----------- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 7 ++---- gtsam/hybrid/HybridNonlinearFactor.cpp | 2 +- gtsam/hybrid/tests/Switching.h | 2 +- gtsam/hybrid/tests/testGaussianMixture.cpp | 12 +++++++++ gtsam/hybrid/tests/testHybridEstimation.cpp | 4 +-- gtsam/hybrid/tests/testHybridFactorGraph.cpp | 2 +- .../hybrid/tests/testHybridGaussianFactor.cpp | 25 ++++++++----------- .../tests/testHybridGaussianFactorGraph.cpp | 18 ++++++------- .../hybrid/tests/testSerializationHybrid.cpp | 3 +-- 12 files changed, 43 insertions(+), 59 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 07c80f58c..46481e305 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -52,10 +52,6 @@ HybridGaussianConditional::ConstructorHelper::ConstructorHelper( return {std::dynamic_pointer_cast(c), value}; }; pairs = HybridGaussianFactor::FactorValuePairs(conditionals, func); - - // Build continuousKeys - continuousKeys = frontals; - continuousKeys.insert(continuousKeys.end(), parents.begin(), parents.end()); } /* *******************************************************************************/ @@ -222,8 +218,8 @@ std::shared_ptr HybridGaussianConditional::likelihood( return {likelihood_m, Cgm_Kgcm}; } }); - return std::make_shared( - continuousParentKeys, discreteParentKeys, likelihoods); + return std::make_shared(discreteParentKeys, + likelihoods); } /* ************************************************************************* */ diff --git 
a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index ff9391d67..15ef43173 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -186,7 +186,7 @@ class GTSAM_EXPORT HybridGaussianConditional private: /// Helper struct for private constructor. struct ConstructorHelper { - KeyVector frontals, parents, continuousKeys; + KeyVector frontals, parents; HybridGaussianFactor::FactorValuePairs pairs; double negLogConstant; /// Compute all variables needed for the private constructor below. @@ -198,7 +198,7 @@ class GTSAM_EXPORT HybridGaussianConditional const DiscreteKeys &discreteParents, const HybridGaussianConditional::Conditionals &conditionals, const ConstructorHelper &helper) - : BaseFactor(helper.continuousKeys, discreteParents, helper.pairs), + : BaseFactor(discreteParents, helper.pairs), BaseConditional(helper.frontals.size()), conditionals_(conditionals), negLogConstant_(helper.negLogConstant) {} diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 952bb7017..12ec1d2a8 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -85,11 +85,6 @@ HybridGaussianFactor::HybridGaussianFactor( } } - // Check that this worked. - if (continuousKeys.empty()) { - throw std::invalid_argument("Need at least one non-null factor."); - } - // Initialize the base class Factor::keys_ = continuousKeys; Factor::keys_.push_back(discreteKey.first); @@ -116,11 +111,6 @@ HybridGaussianFactor::HybridGaussianFactor( } } - // Check that this worked. - if (continuousKeys.empty()) { - throw std::invalid_argument("Need at least one non-null factor."); - } - // Initialize the base class Factor::keys_ = continuousKeys; Factor::keys_.push_back(discreteKey.first); @@ -150,11 +140,6 @@ HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys &discreteKeys, } }); - // Check that this worked. 
- if (continuousKeys.empty()) { - throw std::invalid_argument("Need at least one non-null factor."); - } - // Initialize the base class Factor::keys_ = continuousKeys; for (const auto &discreteKey : discreteKeys) { diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 540ee446b..b519b3a0a 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -346,7 +346,6 @@ static std::shared_ptr createDiscreteFactor( // for conditional constants. static std::shared_ptr createHybridGaussianFactor( const DecisionTree &eliminationResults, - const KeyVector &continuousSeparator, const DiscreteKeys &discreteSeparator) { // Correct for the normalization constant used up by the conditional auto correct = [&](const Result &pair) -> GaussianFactorValuePair { @@ -364,8 +363,7 @@ static std::shared_ptr createHybridGaussianFactor( DecisionTree newFactors(eliminationResults, correct); - return std::make_shared(continuousSeparator, - discreteSeparator, newFactors); + return std::make_shared(discreteSeparator, newFactors); } static std::pair> @@ -407,8 +405,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, auto newFactor = continuousSeparator.empty() ? 
createDiscreteFactor(eliminationResults, discreteSeparator) - : createHybridGaussianFactor(eliminationResults, continuousSeparator, - discreteSeparator); + : createHybridGaussianFactor(eliminationResults, discreteSeparator); // Create the HybridGaussianConditional from the conditionals HybridGaussianConditional::Conditionals conditionals( diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp index f5be6ded8..301d79593 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.cpp +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -169,7 +169,7 @@ std::shared_ptr HybridNonlinearFactor::linearize( DecisionTree> linearized_factors(factors_, linearizeDT); - return std::make_shared(continuousKeys_, discreteKeys_, + return std::make_shared(discreteKeys_, linearized_factors); } diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 8836d04c8..9144e3b6d 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -65,7 +65,7 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( new JacobianFactor(x(t), I_3x3, x(t + 1), I_3x3, Z_3x1)); components.emplace_back( new JacobianFactor(x(t), I_3x3, x(t + 1), I_3x3, Vector3::Ones())); - hfg.add(HybridGaussianFactor({x(t), x(t + 1)}, {m(t), 2}, components)); + hfg.add(HybridGaussianFactor({m(t), 2}, components)); if (t > 1) { hfg.add(DecisionTreeFactor({{m(t - 1), 2}, {m(t), 2}}, "0 1 1 3")); diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index cde7e4063..743b98c66 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -101,6 +101,12 @@ TEST(GaussianMixture, GaussianMixtureModel) { auto hbn = GaussianMixtureModel(mu0, mu1, sigma, sigma); + // Check the number of keys matches what we expect + auto hgc = hbn.at(0)->asHybrid(); + EXPECT_LONGS_EQUAL(2, hgc->keys().size()); + EXPECT_LONGS_EQUAL(1, hgc->continuousKeys().size()); + 
EXPECT_LONGS_EQUAL(1, hgc->discreteKeys().size()); + // At the halfway point between the means, we should get P(m|z)=0.5 double midway = mu1 - mu0; auto pMid = SolveHBN(hbn, midway); @@ -135,6 +141,12 @@ TEST(GaussianMixture, GaussianMixtureModel2) { auto hbn = GaussianMixtureModel(mu0, mu1, sigma0, sigma1); + // Check the number of keys matches what we expect + auto hgc = hbn.at(0)->asHybrid(); + EXPECT_LONGS_EQUAL(2, hgc->keys().size()); + EXPECT_LONGS_EQUAL(1, hgc->continuousKeys().size()); + EXPECT_LONGS_EQUAL(1, hgc->discreteKeys().size()); + // We get zMax=3.1333 by finding the maximum value of the function, at which // point the mode m==1 is about twice as probable as m==0. double zMax = 3.133; diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 6c69607a4..2a20f5f5e 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -566,8 +566,8 @@ std::shared_ptr mixedVarianceFactor( [](const GaussianFactor::shared_ptr& gf) -> GaussianFactorValuePair { return {gf, 0.0}; }); - return std::make_shared( - gmf->continuousKeys(), gmf->discreteKeys(), updated_pairs); + return std::make_shared(gmf->discreteKeys(), + updated_pairs); } /****************************************************************************/ diff --git a/gtsam/hybrid/tests/testHybridFactorGraph.cpp b/gtsam/hybrid/tests/testHybridFactorGraph.cpp index e4d207af3..c6020de85 100644 --- a/gtsam/hybrid/tests/testHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridFactorGraph.cpp @@ -55,7 +55,7 @@ TEST(HybridFactorGraph, Keys) { std::vector components{ std::make_shared(X(1), I_3x3, Z_3x1), std::make_shared(X(1), I_3x3, Vector3::Ones())}; - hfg.add(HybridGaussianFactor({X(1)}, m1, components)); + hfg.add(HybridGaussianFactor(m1, components)); KeySet expected_continuous{X(0), X(1)}; EXPECT( diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp 
index 5df9f4811..080185d88 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -70,14 +70,14 @@ auto f11 = std::make_shared(X(1), A1, X(2), A2, b); // Test simple to complex constructors... TEST(HybridGaussianFactor, ConstructorVariants) { using namespace test_constructor; - HybridGaussianFactor fromFactors({X(1), X(2)}, m1, {f10, f11}); + HybridGaussianFactor fromFactors(m1, {f10, f11}); std::vector pairs{{f10, 0.0}, {f11, 0.0}}; - HybridGaussianFactor fromPairs({X(1), X(2)}, m1, pairs); + HybridGaussianFactor fromPairs(m1, pairs); assert_equal(fromFactors, fromPairs); HybridGaussianFactor::FactorValuePairs decisionTree({m1}, pairs); - HybridGaussianFactor fromDecisionTree({X(1), X(2)}, {m1}, decisionTree); + HybridGaussianFactor fromDecisionTree({m1}, decisionTree); assert_equal(fromDecisionTree, fromPairs); } @@ -95,13 +95,12 @@ TEST(HybridGaussianFactor, Sum) { // TODO(Frank): why specify keys at all? And: keys in factor should be *all* // keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey? // Design review! 
- HybridGaussianFactor hybridFactorA({X(1), X(2)}, m1, {f10, f11}); - HybridGaussianFactor hybridFactorB({X(1), X(3)}, m2, {f20, f21, f22}); + HybridGaussianFactor hybridFactorA(m1, {f10, f11}); + HybridGaussianFactor hybridFactorB(m2, {f20, f21, f22}); - // Check that number of keys is 3 + // Check the number of keys matches what we expect EXPECT_LONGS_EQUAL(3, hybridFactorA.keys().size()); - - // Check that number of discrete keys is 1 + EXPECT_LONGS_EQUAL(2, hybridFactorA.continuousKeys().size()); EXPECT_LONGS_EQUAL(1, hybridFactorA.discreteKeys().size()); // Create sum of two hybrid factors: it will be a decision tree now on both @@ -122,7 +121,7 @@ TEST(HybridGaussianFactor, Sum) { /* ************************************************************************* */ TEST(HybridGaussianFactor, Printing) { using namespace test_constructor; - HybridGaussianFactor hybridFactor({X(1), X(2)}, m1, {f10, f11}); + HybridGaussianFactor hybridFactor(m1, {f10, f11}); std::string expected = R"(HybridGaussianFactor @@ -159,10 +158,6 @@ Hybrid [x1 x2; 1]{ /* ************************************************************************* */ TEST(HybridGaussianFactor, HybridGaussianConditional) { - KeyVector keys; - keys.push_back(X(0)); - keys.push_back(X(1)); - DiscreteKeys dKeys; dKeys.emplace_back(M(0), 2); dKeys.emplace_back(M(1), 2); @@ -189,7 +184,7 @@ TEST(HybridGaussianFactor, Error) { auto f0 = std::make_shared(X(1), A01, X(2), A02, b); auto f1 = std::make_shared(X(1), A11, X(2), A12, b); - HybridGaussianFactor hybridFactor({X(1), X(2)}, m1, {f0, f1}); + HybridGaussianFactor hybridFactor(m1, {f0, f1}); VectorValues continuousValues; continuousValues.insert(X(1), Vector2(0, 0)); @@ -594,7 +589,7 @@ static HybridGaussianFactorGraph CreateFactorGraph( // the underlying scalar to be log(\sqrt(|2πΣ|)) std::vector factors{{f0, model0->negLogConstant()}, {f1, model1->negLogConstant()}}; - HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors); + HybridGaussianFactor 
motionFactor(m1, factors); HybridGaussianFactorGraph hfg; hfg.push_back(motionFactor); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 6e817d312..e4acda387 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -132,7 +132,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { // Add a hybrid gaussian factor ϕ(x1, c1) DiscreteKey m1(M(1), 2); - hfg.add(HybridGaussianFactor({X(1)}, m1, two::components(X(1)))); + hfg.add(HybridGaussianFactor(m1, two::components(X(1)))); auto result = hfg.eliminateSequential(); @@ -155,7 +155,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) { // Add factor between x0 and x1 hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - hfg.add(HybridGaussianFactor({X(1)}, m1, two::components(X(1)))); + hfg.add(HybridGaussianFactor(m1, two::components(X(1)))); // Discrete probability table for c1 hfg.add(DecisionTreeFactor(m1, {2, 8})); @@ -177,7 +177,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - hfg.add(HybridGaussianFactor({X(1)}, {M(1), 2}, two::components(X(1)))); + hfg.add(HybridGaussianFactor({M(1), 2}, two::components(X(1)))); hfg.add(DecisionTreeFactor(m1, {2, 8})); // TODO(Varun) Adding extra discrete variable not connected to continuous @@ -204,7 +204,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) { hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); // Hybrid factor P(x1|c1) - hfg.add(HybridGaussianFactor({X(1)}, m, two::components(X(1)))); + hfg.add(HybridGaussianFactor(m, two::components(X(1)))); // Prior factor on c1 hfg.add(DecisionTreeFactor(m, {2, 8})); @@ -229,8 +229,8 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { hfg.add(JacobianFactor(X(1), I_3x3, 
X(2), -I_3x3, Z_3x1)); { - hfg.add(HybridGaussianFactor({X(0)}, {M(0), 2}, two::components(X(0)))); - hfg.add(HybridGaussianFactor({X(2)}, {M(1), 2}, two::components(X(2)))); + hfg.add(HybridGaussianFactor({M(0), 2}, two::components(X(0)))); + hfg.add(HybridGaussianFactor({M(1), 2}, two::components(X(2)))); } hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); @@ -239,8 +239,8 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1)); { - hfg.add(HybridGaussianFactor({X(3)}, {M(3), 2}, two::components(X(3)))); - hfg.add(HybridGaussianFactor({X(5)}, {M(2), 2}, two::components(X(5)))); + hfg.add(HybridGaussianFactor({M(3), 2}, two::components(X(3)))); + hfg.add(HybridGaussianFactor({M(2), 2}, two::components(X(5)))); } auto ordering_full = @@ -525,7 +525,7 @@ TEST(HybridGaussianFactorGraph, optimize) { hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - hfg.add(HybridGaussianFactor({X(1)}, c1, two::components(X(1)))); + hfg.add(HybridGaussianFactor(c1, two::components(X(1)))); auto result = hfg.eliminateSequential(); diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 6a672a3d2..e09669117 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -75,7 +75,6 @@ BOOST_CLASS_EXPORT_GUID(HybridBayesNet, "gtsam_HybridBayesNet"); /* ****************************************************************************/ // Test HybridGaussianFactor serialization. 
TEST(HybridSerialization, HybridGaussianFactor) { - KeyVector continuousKeys{X(0)}; DiscreteKey discreteKey{M(0), 2}; auto A = Matrix::Zero(2, 1); @@ -85,7 +84,7 @@ TEST(HybridSerialization, HybridGaussianFactor) { auto f1 = std::make_shared(X(0), A, b1); std::vector factors{f0, f1}; - const HybridGaussianFactor factor(continuousKeys, discreteKey, factors); + const HybridGaussianFactor factor(discreteKey, factors); EXPECT(equalsObj(factor)); EXPECT(equalsXML(factor)); From 69b1313eed61e5e185b84d5cc745c0e31ef43ee7 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 13:22:32 -0700 Subject: [PATCH 346/398] Working version with helper --- gtsam/hybrid/HybridGaussianFactor.cpp | 71 +++++++--------------- gtsam/hybrid/HybridGaussianFactor.h | 33 +++++++++- gtsam/hybrid/tests/testGaussianMixture.cpp | 12 ---- 3 files changed, 52 insertions(+), 64 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 12ec1d2a8..279ac4069 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -70,14 +70,13 @@ HybridGaussianFactor::Factors HybridGaussianFactor::augment( } /* *******************************************************************************/ -HybridGaussianFactor::HybridGaussianFactor( +HybridGaussianFactor::ConstructorHelper::ConstructorHelper( const DiscreteKey &discreteKey, const std::vector &factors) - : Base(HybridFactor::Category::Hybrid) { - // Extract continuous keys from first-null factor and verify all others - KeyVector continuousKeys; + : discreteKeys({discreteKey}) { + // Extract continuous keys from the first non-null factor for (const auto &factor : factors) { - if (!factor) continue; + if (!factor) continue; // Skip null factors if (continuousKeys.empty()) { continuousKeys = factor->keys(); } else if (factor->keys() != continuousKeys) { @@ -85,25 +84,18 @@ HybridGaussianFactor::HybridGaussianFactor( } } - // Initialize the base class - Factor::keys_ 
= continuousKeys; - Factor::keys_.push_back(discreteKey.first); - Base::discreteKeys_ = {discreteKey}; - Base::continuousKeys_ = continuousKeys; - - // Build the DecisionTree from factor vector - factors_ = Factors({discreteKey}, factors); + // Build the DecisionTree from the factor vector + factorsTree = Factors(discreteKeys, factors); } /* *******************************************************************************/ -HybridGaussianFactor::HybridGaussianFactor( +HybridGaussianFactor::ConstructorHelper::ConstructorHelper( const DiscreteKey &discreteKey, const std::vector &factorPairs) - : Base(HybridFactor::Category::Hybrid) { - // Extract continuous keys from first-null factor and verify all others - KeyVector continuousKeys; + : discreteKeys({discreteKey}) { + // Extract continuous keys from the first non-null factor for (const auto &pair : factorPairs) { - if (!pair.first) continue; + if (!pair.first) continue; // Skip null factors if (continuousKeys.empty()) { continuousKeys = pair.first->keys(); } else if (pair.first->keys() != continuousKeys) { @@ -111,45 +103,26 @@ HybridGaussianFactor::HybridGaussianFactor( } } - // Initialize the base class - Factor::keys_ = continuousKeys; - Factor::keys_.push_back(discreteKey.first); - Base::discreteKeys_ = {discreteKey}; - Base::continuousKeys_ = continuousKeys; - // Build the FactorValuePairs DecisionTree - FactorValuePairs pairTree({discreteKey}, factorPairs); - - // Assign factors_ after calling augment - factors_ = augment(pairTree); + pairs = FactorValuePairs(discreteKeys, factorPairs); } /* *******************************************************************************/ -HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys &discreteKeys, - const FactorValuePairs &factorPairs) - : Base(HybridFactor::Category::Hybrid) { - // Verify that all factors have the same keys - KeyVector continuousKeys; +HybridGaussianFactor::ConstructorHelper::ConstructorHelper( + const DiscreteKeys &discreteKeys, const 
FactorValuePairs &factorPairs) + : discreteKeys(discreteKeys) { + // Extract continuous keys from the first non-null factor factorPairs.visit([&](const GaussianFactorValuePair &pair) { - if (pair.first) { - if (continuousKeys.empty()) { - continuousKeys = pair.first->keys(); - } else if (pair.first->keys() != continuousKeys) { - throw std::invalid_argument("All factors must have the same keys"); - } + if (!pair.first) return; // Skip null factors + if (continuousKeys.empty()) { + continuousKeys = pair.first->keys(); + } else if (pair.first->keys() != continuousKeys) { + throw std::invalid_argument("All factors must have the same keys"); } }); - // Initialize the base class - Factor::keys_ = continuousKeys; - for (const auto &discreteKey : discreteKeys) { - Factor::keys_.push_back(discreteKey.first); - } - Base::discreteKeys_ = discreteKeys; - Base::continuousKeys_ = continuousKeys; - - // Assign factors_ after calling augment - factors_ = augment(factorPairs); + // Build the FactorValuePairs DecisionTree + pairs = factorPairs; } /* *******************************************************************************/ diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 0dc80ed36..b20dc130c 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -89,7 +89,8 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * @param factors Vector of gaussian factors, one for each mode. */ HybridGaussianFactor(const DiscreteKey &discreteKey, - const std::vector &factors); + const std::vector &factors) + : HybridGaussianFactor(ConstructorHelper(discreteKey, factors)) {} /** * @brief Construct a new HybridGaussianFactor on a single discrete key, @@ -101,7 +102,8 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * @param factorPairs Vector of gaussian factor-scalar pairs, one per mode. 
*/ HybridGaussianFactor(const DiscreteKey &discreteKey, - const std::vector &factorPairs); + const std::vector &factorPairs) + : HybridGaussianFactor(ConstructorHelper(discreteKey, factorPairs)) {} /** * @brief Construct a new HybridGaussianFactor on a several discrete keys M, @@ -113,7 +115,8 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * @param factors The decision tree of Gaussian factor/scalar pairs. */ HybridGaussianFactor(const DiscreteKeys &discreteKeys, - const FactorValuePairs &factors); + const FactorValuePairs &factors) + : HybridGaussianFactor(ConstructorHelper(discreteKeys, factors)) {} /// @} /// @name Testable @@ -193,6 +196,30 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { double potentiallyPrunedComponentError( const sharedFactor &gf, const VectorValues &continuousValues) const; + /// Helper struct to assist in constructing the HybridGaussianFactor + struct ConstructorHelper { + KeyVector continuousKeys; // Continuous keys extracted from factors + DiscreteKeys discreteKeys; // Discrete keys provided to the constructors + FactorValuePairs pairs; // Used only if factorsTree is empty + Factors factorsTree; + + ConstructorHelper( + const DiscreteKey &discreteKey, + const std::vector &factorsVec); + + ConstructorHelper(const DiscreteKey &discreteKey, + const std::vector &factorPairs); + + ConstructorHelper(const DiscreteKeys &discreteKeys, + const FactorValuePairs &factorPairs); + }; + + // Private constructor using ConstructorHelper + HybridGaussianFactor(const ConstructorHelper &helper) + : Base(helper.continuousKeys, helper.discreteKeys), + factors_(helper.factorsTree.empty() ? 
augment(helper.pairs) + : helper.factorsTree) {} + #ifdef GTSAM_ENABLE_BOOST_SERIALIZATION /** Serialization function */ friend class boost::serialization::access; diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index 743b98c66..cde7e4063 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -101,12 +101,6 @@ TEST(GaussianMixture, GaussianMixtureModel) { auto hbn = GaussianMixtureModel(mu0, mu1, sigma, sigma); - // Check the number of keys matches what we expect - auto hgc = hbn.at(0)->asHybrid(); - EXPECT_LONGS_EQUAL(2, hgc->keys().size()); - EXPECT_LONGS_EQUAL(1, hgc->continuousKeys().size()); - EXPECT_LONGS_EQUAL(1, hgc->discreteKeys().size()); - // At the halfway point between the means, we should get P(m|z)=0.5 double midway = mu1 - mu0; auto pMid = SolveHBN(hbn, midway); @@ -141,12 +135,6 @@ TEST(GaussianMixture, GaussianMixtureModel2) { auto hbn = GaussianMixtureModel(mu0, mu1, sigma0, sigma1); - // Check the number of keys matches what we expect - auto hgc = hbn.at(0)->asHybrid(); - EXPECT_LONGS_EQUAL(2, hgc->keys().size()); - EXPECT_LONGS_EQUAL(1, hgc->continuousKeys().size()); - EXPECT_LONGS_EQUAL(1, hgc->discreteKeys().size()); - // We get zMax=3.1333 by finding the maximum value of the function, at which // point the mode m==1 is about twice as probable as m==0. 
double zMax = 3.133; From 83fae8ecf8157a708a4404d8dfd7ce70f4a81cc8 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 13:34:03 -0700 Subject: [PATCH 347/398] Remove error checking --- gtsam/hybrid/HybridGaussianConditional.cpp | 9 ++------- gtsam/hybrid/HybridGaussianFactor.cpp | 17 +++++------------ 2 files changed, 7 insertions(+), 19 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 46481e305..f3a699094 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -37,14 +37,9 @@ HybridGaussianConditional::ConstructorHelper::ConstructorHelper( [&](const GaussianConditional::shared_ptr &c) -> GaussianFactorValuePair { double value = 0.0; if (c) { - KeyVector cf(c->frontals().begin(), c->frontals().end()); - KeyVector cp(c->parents().begin(), c->parents().end()); if (frontals.empty()) { - frontals = cf; - parents = cp; - } else if (cf != frontals || cp != parents) { - throw std::invalid_argument( - "All conditionals must have the same frontals and parents"); + frontals = KeyVector(c->frontals().begin(), c->frontals().end()); + parents = KeyVector(c->parents().begin(), c->parents().end()); } value = c->negLogConstant(); negLogConstant = std::min(negLogConstant, value); diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 279ac4069..938a79b19 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -76,11 +76,9 @@ HybridGaussianFactor::ConstructorHelper::ConstructorHelper( : discreteKeys({discreteKey}) { // Extract continuous keys from the first non-null factor for (const auto &factor : factors) { - if (!factor) continue; // Skip null factors - if (continuousKeys.empty()) { + if (factor && continuousKeys.empty()) { continuousKeys = factor->keys(); - } else if (factor->keys() != continuousKeys) { - throw std::invalid_argument("All factors must have the 
same keys"); + break; } } @@ -95,11 +93,9 @@ HybridGaussianFactor::ConstructorHelper::ConstructorHelper( : discreteKeys({discreteKey}) { // Extract continuous keys from the first non-null factor for (const auto &pair : factorPairs) { - if (!pair.first) continue; // Skip null factors - if (continuousKeys.empty()) { + if (pair.first && continuousKeys.empty()) { continuousKeys = pair.first->keys(); - } else if (pair.first->keys() != continuousKeys) { - throw std::invalid_argument("All factors must have the same keys"); + break; } } @@ -113,11 +109,8 @@ HybridGaussianFactor::ConstructorHelper::ConstructorHelper( : discreteKeys(discreteKeys) { // Extract continuous keys from the first non-null factor factorPairs.visit([&](const GaussianFactorValuePair &pair) { - if (!pair.first) return; // Skip null factors - if (continuousKeys.empty()) { + if (pair.first && continuousKeys.empty()) { continuousKeys = pair.first->keys(); - } else if (pair.first->keys() != continuousKeys) { - throw std::invalid_argument("All factors must have the same keys"); } }); From e18dd3e90500a4d40aaa3b746e2acf484f545da6 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 13:38:54 -0700 Subject: [PATCH 348/398] Removed unneeded changes --- gtsam/hybrid/HybridFactor.h | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index 39a72eb26..fc91e0838 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -56,9 +56,11 @@ class GTSAM_EXPORT HybridFactor : public Factor { /// Enum to help with categorizing hybrid factors. enum class Category { None, Discrete, Continuous, Hybrid }; - protected: + private: /// Record what category of HybridFactor this is. Category category_ = Category::None; + + protected: // Set of DiscreteKeys for this factor. 
DiscreteKeys discreteKeys_; /// Record continuous keys for book-keeping @@ -140,10 +142,6 @@ class GTSAM_EXPORT HybridFactor : public Factor { /// @} - protected: - /// protected constructor to initialize the category - HybridFactor(Category category) : category_(category) {} - private: #ifdef GTSAM_ENABLE_BOOST_SERIALIZATION /** Serialization function */ From 2c12e685eae4e128808d9b968c0f8fa83d686b25 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 14:50:50 -0700 Subject: [PATCH 349/398] Fix HybridNonlinearFactor --- gtsam/hybrid/HybridGaussianFactor.h | 9 ++-- gtsam/hybrid/HybridNonlinearFactor.cpp | 66 ++++++++++++++------------ gtsam/hybrid/HybridNonlinearFactor.h | 43 ++++++++++++----- 3 files changed, 70 insertions(+), 48 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index b20dc130c..46b21b8aa 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -196,16 +196,15 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { double potentiallyPrunedComponentError( const sharedFactor &gf, const VectorValues &continuousValues) const; - /// Helper struct to assist in constructing the HybridGaussianFactor + /// Helper struct to assist private constructor below. 
struct ConstructorHelper { KeyVector continuousKeys; // Continuous keys extracted from factors DiscreteKeys discreteKeys; // Discrete keys provided to the constructors FactorValuePairs pairs; // Used only if factorsTree is empty Factors factorsTree; - ConstructorHelper( - const DiscreteKey &discreteKey, - const std::vector &factorsVec); + ConstructorHelper(const DiscreteKey &discreteKey, + const std::vector &factors); ConstructorHelper(const DiscreteKey &discreteKey, const std::vector &factorPairs); @@ -214,7 +213,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { const FactorValuePairs &factorPairs); }; - // Private constructor using ConstructorHelper + // Private constructor using ConstructorHelper above. HybridGaussianFactor(const ConstructorHelper &helper) : Base(helper.continuousKeys, helper.discreteKeys), factors_(helper.factorsTree.empty() ? augment(helper.pairs) diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp index 301d79593..351a7fea4 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.cpp +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -18,55 +18,59 @@ #include +#include + +#include "gtsam/nonlinear/NonlinearFactor.h" + namespace gtsam { /* *******************************************************************************/ -static void checkKeys(const KeyVector& continuousKeys, - const std::vector& pairs) { - KeySet factor_keys_set; - for (const auto& pair : pairs) { - auto f = pair.first; - // Insert all factor continuous keys in the continuous keys set. 
- std::copy(f->keys().begin(), f->keys().end(), - std::inserter(factor_keys_set, factor_keys_set.end())); - } - - KeySet continuous_keys_set(continuousKeys.begin(), continuousKeys.end()); - if (continuous_keys_set != factor_keys_set) { +static void CopyOrCheckContinuousKeys(const NonlinearFactor::shared_ptr& factor, + KeyVector* continuousKeys) { + if (!factor) return; + if (continuousKeys->empty()) { + *continuousKeys = factor->keys(); + } else if (factor->keys() != *continuousKeys) { throw std::runtime_error( - "HybridNonlinearFactor: The specified continuous keys and the keys in " - "the factors do not match!"); + "HybridNonlinearFactor: all factors should have the same keys!"); } } /* *******************************************************************************/ -HybridNonlinearFactor::HybridNonlinearFactor( - const KeyVector& continuousKeys, const DiscreteKey& discreteKey, +HybridNonlinearFactor::ConstructorHelper::ConstructorHelper( + const DiscreteKey& discreteKey, const std::vector& factors) - : Base(continuousKeys, {discreteKey}) { + : discreteKeys({discreteKey}) { std::vector pairs; - for (auto&& f : factors) { - pairs.emplace_back(f, 0.0); + // Extract continuous keys from the first non-null factor + for (const auto& factor : factors) { + pairs.emplace_back(factor, 0.0); + CopyOrCheckContinuousKeys(factor, &continuousKeys); } - checkKeys(continuousKeys, pairs); - factors_ = FactorValuePairs({discreteKey}, pairs); + factorTree = FactorValuePairs({discreteKey}, pairs); } /* *******************************************************************************/ -HybridNonlinearFactor::HybridNonlinearFactor( - const KeyVector& continuousKeys, const DiscreteKey& discreteKey, +HybridNonlinearFactor::ConstructorHelper::ConstructorHelper( + const DiscreteKey& discreteKey, const std::vector& pairs) - : Base(continuousKeys, {discreteKey}) { - KeySet continuous_keys_set(continuousKeys.begin(), continuousKeys.end()); - checkKeys(continuousKeys, pairs); - factors_ = 
FactorValuePairs({discreteKey}, pairs); + : discreteKeys({discreteKey}) { + // Extract continuous keys from the first non-null factor + for (const auto& pair : pairs) { + CopyOrCheckContinuousKeys(pair.first, &continuousKeys); + } + factorTree = FactorValuePairs({discreteKey}, pairs); } /* *******************************************************************************/ -HybridNonlinearFactor::HybridNonlinearFactor(const KeyVector& continuousKeys, - const DiscreteKeys& discreteKeys, - const FactorValuePairs& factors) - : Base(continuousKeys, discreteKeys), factors_(factors) {} +HybridNonlinearFactor::ConstructorHelper::ConstructorHelper( + const DiscreteKeys& discreteKeys, const FactorValuePairs& factorPairs) + : discreteKeys(discreteKeys), factorTree(factorPairs) { + // Extract continuous keys from the first non-null factor + factorPairs.visit([&](const NonlinearFactorValuePair& pair) { + CopyOrCheckContinuousKeys(pair.first, &continuousKeys); + }); +} /* *******************************************************************************/ AlgebraicDecisionTree HybridNonlinearFactor::errorTree( diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 7843afc83..161a7b357 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -90,13 +90,12 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor { * providing the factors for each mode m as a vector of factors ϕ_m(x). * The value ϕ(x,m) for the factor is simply ϕ_m(x). * - * @param continuousKeys Vector of keys for continuous factors. * @param discreteKey The discrete key for the "mode", indexing components. * @param factors Vector of gaussian factors, one for each mode. 
*/ - HybridNonlinearFactor( - const KeyVector& continuousKeys, const DiscreteKey& discreteKey, - const std::vector& factors); + HybridNonlinearFactor(const DiscreteKey& discreteKey, + const std::vector& factors) + : HybridNonlinearFactor(ConstructorHelper(discreteKey, factors)) {} /** * @brief Construct a new HybridNonlinearFactor on a single discrete key, @@ -104,13 +103,12 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor { * provided as a vector of pairs (ϕ_m(x), E_m). * The value ϕ(x,m) for the factor is now ϕ_m(x) + E_m. * - * @param continuousKeys Vector of keys for continuous factors. * @param discreteKey The discrete key for the "mode", indexing components. * @param pairs Vector of gaussian factor-scalar pairs, one per mode. */ - HybridNonlinearFactor(const KeyVector& continuousKeys, - const DiscreteKey& discreteKey, - const std::vector& pairs); + HybridNonlinearFactor(const DiscreteKey& discreteKey, + const std::vector& pairs) + : HybridNonlinearFactor(ConstructorHelper(discreteKey, pairs)) {} /** * @brief Construct a new HybridNonlinearFactor on a several discrete keys M, @@ -118,13 +116,12 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor { * scalars are provided as a DecisionTree of pairs (ϕ_M(x), E_M). * The value ϕ(x,M) for the factor is again ϕ_m(x) + E_m. * - * @param continuousKeys A vector of keys representing continuous variables. * @param discreteKeys Discrete variables and their cardinalities. * @param factors The decision tree of nonlinear factor/scalar pairs. */ - HybridNonlinearFactor(const KeyVector& continuousKeys, - const DiscreteKeys& discreteKeys, - const FactorValuePairs& factors); + HybridNonlinearFactor(const DiscreteKeys& discreteKeys, + const FactorValuePairs& factors) + : HybridNonlinearFactor(ConstructorHelper(discreteKeys, factors)) {} /** * @brief Compute error of the HybridNonlinearFactor as a tree. 
* @@ -181,6 +178,28 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor { /// Linearize all the continuous factors to get a HybridGaussianFactor. std::shared_ptr linearize( const Values& continuousValues) const; + + private: + /// Helper struct to assist private constructor below. + struct ConstructorHelper { + KeyVector continuousKeys; // Continuous keys extracted from factors + DiscreteKeys discreteKeys; // Discrete keys provided to the constructors + FactorValuePairs factorTree; + + ConstructorHelper(const DiscreteKey& discreteKey, + const std::vector& factors); + + ConstructorHelper(const DiscreteKey& discreteKey, + const std::vector& factorPairs); + + ConstructorHelper(const DiscreteKeys& discreteKeys, + const FactorValuePairs& factorPairs); + }; + + // Private constructor using ConstructorHelper above. + HybridNonlinearFactor(const ConstructorHelper& helper) + : Base(helper.continuousKeys, helper.discreteKeys), + factors_(helper.factorTree) {} }; // traits From bb4c3c95abb93ea6ab31d925cfc17d573179593a Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 14:51:10 -0700 Subject: [PATCH 350/398] Fix call sites for HybridNonlinearFactor --- gtsam/hybrid/tests/Switching.h | 3 +- gtsam/hybrid/tests/testHybridBayesNet.cpp | 2 +- gtsam/hybrid/tests/testHybridEstimation.cpp | 11 ++----- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 12 ++------ .../tests/testHybridNonlinearFactor.cpp | 10 +++---- .../tests/testHybridNonlinearFactorGraph.cpp | 30 ++----------------- .../hybrid/tests/testHybridNonlinearISAM.cpp | 12 ++------ 7 files changed, 19 insertions(+), 61 deletions(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 9144e3b6d..23a102fd5 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -159,9 +159,8 @@ struct Switching { // Add "motion models". 
for (size_t k = 0; k < K - 1; k++) { - KeyVector keys = {X(k), X(k + 1)}; auto motion_models = motionModels(k, between_sigma); - nonlinearFactorGraph.emplace_shared(keys, modes[k], + nonlinearFactorGraph.emplace_shared(modes[k], motion_models); } diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index b049b03fd..8659100ac 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -388,7 +388,7 @@ TEST(HybridBayesNet, Sampling) { auto one_motion = std::make_shared>(X(0), X(1), 1, noise_model); nfg.emplace_shared( - KeyVector{X(0), X(1)}, DiscreteKey(M(0), 2), + DiscreteKey(M(0), 2), std::vector{zero_motion, one_motion}); DiscreteKey mode(M(0), 2); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 2a20f5f5e..1b0286084 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -437,8 +437,7 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() { std::make_shared>(X(0), X(1), 1, noise_model); std::vector components = {zero_motion, one_motion}; - nfg.emplace_shared(KeyVector{X(0), X(1)}, m, - components); + nfg.emplace_shared(m, components); return nfg; } @@ -591,9 +590,7 @@ TEST(HybridEstimation, ModeSelection) { X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight)); std::vector components = {model0, model1}; - KeyVector keys = {X(0), X(1)}; - DiscreteKey modes(M(0), 2); - HybridNonlinearFactor mf(keys, modes, components); + HybridNonlinearFactor mf({M(0), 2}, components); initial.insert(X(0), 0.0); initial.insert(X(1), 0.0); @@ -681,9 +678,7 @@ TEST(HybridEstimation, ModeSelection2) { X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight)); std::vector components = {model0, model1}; - KeyVector keys = {X(0), X(1)}; - DiscreteKey modes(M(0), 2); - HybridNonlinearFactor mf(keys, modes, components); + HybridNonlinearFactor mf({M(0), 
2}, components); initial.insert(X(0), Z_3x1); initial.insert(X(1), Z_3x1); diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index 9ce109647..280c6cb53 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -414,15 +414,13 @@ TEST(HybridGaussianISAM, NonTrivial) { // Add odometry factor with discrete modes. Pose2 odometry(1.0, 0.0, 0.0); - KeyVector contKeys = {W(0), W(1)}; auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); std::vector components; components.emplace_back( new PlanarMotionModel(W(0), W(1), odometry, noise_model)); // moving components.emplace_back( new PlanarMotionModel(W(0), W(1), Pose2(0, 0, 0), noise_model)); // still - fg.emplace_shared( - contKeys, gtsam::DiscreteKey(M(1), 2), components); + fg.emplace_shared(DiscreteKey(M(1), 2), components); // Add equivalent of ImuFactor fg.emplace_shared>(X(0), X(1), Pose2(1.0, 0.0, 0), @@ -454,14 +452,12 @@ TEST(HybridGaussianISAM, NonTrivial) { /*************** Run Round 3 ***************/ // Add odometry factor with discrete modes. - contKeys = {W(1), W(2)}; components.clear(); components.emplace_back( new PlanarMotionModel(W(1), W(2), odometry, noise_model)); // moving components.emplace_back( new PlanarMotionModel(W(1), W(2), Pose2(0, 0, 0), noise_model)); // still - fg.emplace_shared( - contKeys, gtsam::DiscreteKey(M(2), 2), components); + fg.emplace_shared(DiscreteKey(M(2), 2), components); // Add equivalent of ImuFactor fg.emplace_shared>(X(1), X(2), Pose2(1.0, 0.0, 0), @@ -496,14 +492,12 @@ TEST(HybridGaussianISAM, NonTrivial) { /*************** Run Round 4 ***************/ // Add odometry factor with discrete modes. 
- contKeys = {W(2), W(3)}; components.clear(); components.emplace_back( new PlanarMotionModel(W(2), W(3), odometry, noise_model)); // moving components.emplace_back( new PlanarMotionModel(W(2), W(3), Pose2(0, 0, 0), noise_model)); // still - fg.emplace_shared( - contKeys, gtsam::DiscreteKey(M(3), 2), components); + fg.emplace_shared(DiscreteKey(M(3), 2), components); // Add equivalent of ImuFactor fg.emplace_shared>(X(2), X(3), Pose2(1.0, 0.0, 0), diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp index d949a1e06..2b441ab13 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp @@ -60,14 +60,14 @@ auto f1 = std::make_shared>(X(1), X(2), between1, model); // Test simple to complex constructors... TEST(HybridGaussianFactor, ConstructorVariants) { using namespace test_constructor; - HybridNonlinearFactor fromFactors({X(1), X(2)}, m1, {f0, f1}); + HybridNonlinearFactor fromFactors(m1, {f0, f1}); std::vector pairs{{f0, 0.0}, {f1, 0.0}}; - HybridNonlinearFactor fromPairs({X(1), X(2)}, m1, pairs); + HybridNonlinearFactor fromPairs(m1, pairs); assert_equal(fromFactors, fromPairs); HybridNonlinearFactor::FactorValuePairs decisionTree({m1}, pairs); - HybridNonlinearFactor fromDecisionTree({X(1), X(2)}, {m1}, decisionTree); + HybridNonlinearFactor fromDecisionTree({m1}, decisionTree); assert_equal(fromDecisionTree, fromPairs); } @@ -75,7 +75,7 @@ TEST(HybridGaussianFactor, ConstructorVariants) { // Test .print() output. 
TEST(HybridNonlinearFactor, Printing) { using namespace test_constructor; - HybridNonlinearFactor hybridFactor({X(1), X(2)}, {m1}, {f0, f1}); + HybridNonlinearFactor hybridFactor({m1}, {f0, f1}); std::string expected = R"(Hybrid [x1 x2; 1] @@ -101,7 +101,7 @@ static HybridNonlinearFactor getHybridNonlinearFactor() { std::make_shared>(X(1), X(2), between0, model); auto f1 = std::make_shared>(X(1), X(2), between1, model); - return HybridNonlinearFactor({X(1), X(2)}, m1, {f0, f1}); + return HybridNonlinearFactor(m1, {f0, f1}); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 63a41a63a..f9a546c81 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -117,7 +117,6 @@ TEST(HybridNonlinearFactorGraph, Resize) { /***************************************************************************/ namespace test_motion { -KeyVector contKeys = {X(0), X(1)}; gtsam::DiscreteKey m1(M(1), 2); auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0); std::vector components = { @@ -139,8 +138,7 @@ TEST(HybridGaussianFactorGraph, Resize) { auto discreteFactor = std::make_shared(); hnfg.push_back(discreteFactor); - auto dcFactor = - std::make_shared(contKeys, m1, components); + auto dcFactor = std::make_shared(m1, components); hnfg.push_back(dcFactor); Values linearizationPoint; @@ -156,26 +154,6 @@ TEST(HybridGaussianFactorGraph, Resize) { EXPECT_LONGS_EQUAL(gfg.size(), 0); } -/*************************************************************************** - * Test that the HybridNonlinearFactor reports correctly if the number of - * continuous keys provided do not match the keys in the factors. 
- */ -TEST(HybridGaussianFactorGraph, HybridNonlinearFactor) { - using namespace test_motion; - - auto nonlinearFactor = std::make_shared>( - X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1)); - auto discreteFactor = std::make_shared(); - - // Check for exception when number of continuous keys are under-specified. - THROWS_EXCEPTION( - std::make_shared(KeyVector{X(0)}, m1, components)); - - // Check for exception when number of continuous keys are too many. - THROWS_EXCEPTION(std::make_shared( - KeyVector{X(0), X(1), X(2)}, m1, components)); -} - /***************************************************************************** * Test push_back on HFG makes the correct distinction. */ @@ -828,14 +806,12 @@ TEST(HybridNonlinearFactorGraph, DefaultDecisionTree) { // Add odometry factor Pose2 odometry(2.0, 0.0, 0.0); - KeyVector contKeys = {X(0), X(1)}; auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); std::vector motion_models = { std::make_shared(X(0), X(1), Pose2(0, 0, 0), noise_model), std::make_shared(X(0), X(1), odometry, noise_model)}; - fg.emplace_shared( - contKeys, gtsam::DiscreteKey(M(1), 2), motion_models); + fg.emplace_shared(DiscreteKey{M(1), 2}, motion_models); // Add Range-Bearing measurements to from X0 to L0 and X1 to L1. 
// create a noise model for the landmark measurements @@ -901,7 +877,7 @@ static HybridNonlinearFactorGraph CreateFactorGraph( std::vector factors{{f0, model0->negLogConstant()}, {f1, model1->negLogConstant()}}; - HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors); + HybridNonlinearFactor mixtureFactor(m1, factors); HybridNonlinearFactorGraph hfg; hfg.push_back(mixtureFactor); diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index fe81fc13e..9e93116fc 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -433,15 +433,13 @@ TEST(HybridNonlinearISAM, NonTrivial) { /*************** Run Round 2 ***************/ // Add odometry factor with discrete modes. Pose2 odometry(1.0, 0.0, 0.0); - KeyVector contKeys = {W(0), W(1)}; auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); auto still = std::make_shared(W(0), W(1), Pose2(0, 0, 0), noise_model), moving = std::make_shared(W(0), W(1), odometry, noise_model); std::vector components{moving, still}; - fg.emplace_shared( - contKeys, gtsam::DiscreteKey(M(1), 2), components); + fg.emplace_shared(DiscreteKey(M(1), 2), components); // Add equivalent of ImuFactor fg.emplace_shared>(X(0), X(1), Pose2(1.0, 0.0, 0), @@ -473,14 +471,12 @@ TEST(HybridNonlinearISAM, NonTrivial) { /*************** Run Round 3 ***************/ // Add odometry factor with discrete modes. 
- contKeys = {W(1), W(2)}; still = std::make_shared(W(1), W(2), Pose2(0, 0, 0), noise_model); moving = std::make_shared(W(1), W(2), odometry, noise_model); components = {moving, still}; - fg.emplace_shared( - contKeys, gtsam::DiscreteKey(M(2), 2), components); + fg.emplace_shared(DiscreteKey(M(2), 2), components); // Add equivalent of ImuFactor fg.emplace_shared>(X(1), X(2), Pose2(1.0, 0.0, 0), @@ -515,14 +511,12 @@ TEST(HybridNonlinearISAM, NonTrivial) { /*************** Run Round 4 ***************/ // Add odometry factor with discrete modes. - contKeys = {W(2), W(3)}; still = std::make_shared(W(2), W(3), Pose2(0, 0, 0), noise_model); moving = std::make_shared(W(2), W(3), odometry, noise_model); components = {moving, still}; - fg.emplace_shared( - contKeys, gtsam::DiscreteKey(M(3), 2), components); + fg.emplace_shared(DiscreteKey(M(3), 2), components); // Add equivalent of ImuFactor fg.emplace_shared>(X(2), X(3), Pose2(1.0, 0.0, 0), From 1a566ea2bb8076d3da680855995777191b7e5877 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 14:51:39 -0700 Subject: [PATCH 351/398] Fix wrapper --- gtsam/hybrid/hybrid.i | 16 +++++++--------- python/gtsam/tests/test_HybridFactorGraph.py | 5 ++--- 2 files changed, 9 insertions(+), 12 deletions(-) diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index d1b8fbf6d..9ed96d115 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -75,10 +75,12 @@ virtual class HybridConditional { #include class HybridGaussianFactor : gtsam::HybridFactor { HybridGaussianFactor( - const gtsam::KeyVector& continuousKeys, + const gtsam::DiscreteKey& discreteKey, + const std::vector& factors); + HybridGaussianFactor( const gtsam::DiscreteKey& discreteKey, const std::vector>& - factorsList); + factorPairs); void print(string s = "HybridGaussianFactor\n", const gtsam::KeyFormatter& keyFormatter = @@ -88,13 +90,9 @@ class HybridGaussianFactor : gtsam::HybridFactor { #include class HybridGaussianConditional : 
gtsam::HybridFactor { HybridGaussianConditional( - const gtsam::KeyVector& continuousFrontals, - const gtsam::KeyVector& continuousParents, const gtsam::DiscreteKeys& discreteParents, const gtsam::HybridGaussianConditional::Conditionals& conditionals); HybridGaussianConditional( - const gtsam::KeyVector& continuousFrontals, - const gtsam::KeyVector& continuousParents, const gtsam::DiscreteKey& discreteParent, const std::vector& conditionals); @@ -246,15 +244,15 @@ class HybridNonlinearFactorGraph { #include class HybridNonlinearFactor : gtsam::HybridFactor { HybridNonlinearFactor( - const gtsam::KeyVector& keys, const gtsam::DiscreteKey& discreteKey, + const gtsam::DiscreteKey& discreteKey, const std::vector& factors); HybridNonlinearFactor( - const gtsam::KeyVector& keys, const gtsam::DiscreteKey& discreteKey, + const gtsam::DiscreteKey& discreteKey, const std::vector>& factors); HybridNonlinearFactor( - const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, + const gtsam::DiscreteKeys& discreteKeys, const gtsam::DecisionTree< gtsam::Key, std::pair>& factors); diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index 4ec1aec1e..3c63b9154 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -17,7 +17,7 @@ from gtsam.symbol_shorthand import C, M, X, Z from gtsam.utils.test_case import GtsamTestCase import gtsam -from gtsam import (DiscreteConditional, DiscreteKeys, GaussianConditional, +from gtsam import (DiscreteConditional, GaussianConditional, HybridBayesNet, HybridGaussianConditional, HybridGaussianFactor, HybridGaussianFactorGraph, HybridValues, JacobianFactor, noiseModel) @@ -102,8 +102,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): X(0), [0], sigma=3) bayesNet.push_back( - HybridGaussianConditional([Z(i)], [X(0)], mode, - [conditional0, conditional1])) + HybridGaussianConditional(mode, [conditional0, conditional1])) # 
Create prior on X(0). prior_on_x0 = GaussianConditional.FromMeanAndStddev( From e0ce34833a5d27b142e0b3aad70bf7a99648d966 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 26 Sep 2024 18:48:44 -0400 Subject: [PATCH 352/398] remove postfix for python when compiling with Timing and Profiling --- python/CMakeLists.txt | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 501218924..d11f414a7 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -118,6 +118,8 @@ set_target_properties(${GTSAM_PYTHON_TARGET} PROPERTIES LIBRARY_OUTPUT_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam" DEBUG_POSTFIX "" # Otherwise you will have a wrong name RELWITHDEBINFO_POSTFIX "" # Otherwise you will have a wrong name + TIMING_POSTFIX "" # Otherwise you will have a wrong name + PROFILING_POSTFIX "" # Otherwise you will have a wrong name ) if(WIN32) @@ -218,6 +220,8 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) LIBRARY_OUTPUT_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam_unstable" DEBUG_POSTFIX "" # Otherwise you will have a wrong name RELWITHDEBINFO_POSTFIX "" # Otherwise you will have a wrong name + TIMING_POSTFIX "" # Otherwise you will have a wrong name + PROFILING_POSTFIX "" # Otherwise you will have a wrong name ) set(GTSAM_UNSTABLE_MODULE_PATH ${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam_unstable) From 5c75a52fa6378f9371e83d2996c7beb4a6fcf862 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 26 Sep 2024 18:50:50 -0400 Subject: [PATCH 353/398] remove extra whitespaces --- python/CMakeLists.txt | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index d11f414a7..41dd4b141 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -8,7 +8,7 @@ endif() # Generate setup.py. 
file(READ "${PROJECT_SOURCE_DIR}/README.md" README_CONTENTS) configure_file(${PROJECT_PYTHON_SOURCE_DIR}/setup.py.in - ${GTSAM_PYTHON_BUILD_DIRECTORY}/setup.py) + ${GTSAM_PYTHON_BUILD_DIRECTORY}/setup.py) # Supply MANIFEST.in for older versions of Python file(COPY ${PROJECT_PYTHON_SOURCE_DIR}/MANIFEST.in @@ -23,10 +23,9 @@ include(PybindWrap) # Load the pybind11 code - # This is required to avoid an error in modern pybind11 cmake scripts: if(POLICY CMP0057) - cmake_policy(SET CMP0057 NEW) + cmake_policy(SET CMP0057 NEW) endif() # Use bundled pybind11 version @@ -129,14 +128,14 @@ if(WIN32) ADD_CUSTOM_COMMAND(TARGET ${GTSAM_PYTHON_TARGET} POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy_if_different "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam/${GTSAM_OUTPUT_NAME}.pyd" - "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam/gtsam.pyd" + "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam/gtsam.pyd" ) ADD_CUSTOM_COMMAND(TARGET ${GTSAM_PYTHON_TARGET} POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy_if_different "$;$" - "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam/" + "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam/" COMMAND_EXPAND_LISTS - VERBATIM + VERBATIM ) endif() @@ -200,7 +199,6 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) gtsam::gtsfm::KeypointsVector gtsam::gtsfm::SfmTrack2dVector) - pybind_wrap(${GTSAM_PYTHON_UNSTABLE_TARGET} # target ${PROJECT_SOURCE_DIR}/gtsam_unstable/gtsam_unstable.i # interface_header "gtsam_unstable.cpp" # generated_cpp @@ -228,7 +226,7 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) # Copy all python files to build folder. 
copy_directory("${CMAKE_CURRENT_SOURCE_DIR}/gtsam_unstable" - "${GTSAM_UNSTABLE_MODULE_PATH}") + "${GTSAM_UNSTABLE_MODULE_PATH}") # Hack to get python test files copied every time they are modified file(GLOB GTSAM_UNSTABLE_PYTHON_TEST_FILES RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}/gtsam_unstable/" "${CMAKE_CURRENT_SOURCE_DIR}/gtsam_unstable/tests/*.py") @@ -241,7 +239,7 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) if(WIN32) set_target_properties(${GTSAM_PYTHON_UNSTABLE_TARGET} PROPERTIES SUFFIX ".pyd" - ) + ) ADD_CUSTOM_COMMAND(TARGET ${GTSAM_PYTHON_UNSTABLE_TARGET} POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy_if_different "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam_unstable/${GTSAM_UNSTABLE_OUTPUT_NAME}.pyd" @@ -252,7 +250,7 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) "$;$" "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam_unstable/" COMMAND_EXPAND_LISTS - VERBATIM + VERBATIM ) endif() From 234dd0930cf60b33c21b56afe844bae0fda97c08 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 26 Sep 2024 18:51:18 -0400 Subject: [PATCH 354/398] common macro for setting python library properties --- python/CMakeLists.txt | 35 +++++++++++++++-------------------- 1 file changed, 15 insertions(+), 20 deletions(-) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 41dd4b141..9070d191b 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -18,6 +18,19 @@ set(WRAP_BUILD_TYPE_POSTFIXES ${GTSAM_BUILD_TYPE_POSTFIXES}) include(PybindWrap) +macro(SET_PYTHON_TARGET_PROPERTIES PYTHON_TARGET OUTPUT_NAME OUTPUT_DIRECTORY) + set_target_properties(${PYTHON_TARGET} PROPERTIES + INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/lib" + INSTALL_RPATH_USE_LINK_PATH TRUE + OUTPUT_NAME "${OUTPUT_NAME}" + LIBRARY_OUTPUT_DIRECTORY "${OUTPUT_DIRECTORY}" + DEBUG_POSTFIX "" # Otherwise you will have a wrong name + RELWITHDEBINFO_POSTFIX "" # Otherwise you will have a wrong name + TIMING_POSTFIX "" # Otherwise you will have a wrong name + PROFILING_POSTFIX "" # Otherwise you will have a wrong name + ) +endmacro() + 
############################################################ ## Load the necessary files to compile the wrapper @@ -110,16 +123,7 @@ pybind_wrap(${GTSAM_PYTHON_TARGET} # target ${GTSAM_ENABLE_BOOST_SERIALIZATION} # use_boost_serialization ) -set_target_properties(${GTSAM_PYTHON_TARGET} PROPERTIES - INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/lib" - INSTALL_RPATH_USE_LINK_PATH TRUE - OUTPUT_NAME "${GTSAM_OUTPUT_NAME}" - LIBRARY_OUTPUT_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam" - DEBUG_POSTFIX "" # Otherwise you will have a wrong name - RELWITHDEBINFO_POSTFIX "" # Otherwise you will have a wrong name - TIMING_POSTFIX "" # Otherwise you will have a wrong name - PROFILING_POSTFIX "" # Otherwise you will have a wrong name - ) +SET_PYTHON_TARGET_PROPERTIES(${GTSAM_PYTHON_TARGET} ${GTSAM_OUTPUT_NAME} "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam") if(WIN32) set_target_properties(${GTSAM_PYTHON_TARGET} PROPERTIES @@ -211,16 +215,7 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON) ${GTSAM_ENABLE_BOOST_SERIALIZATION} # use_boost_serialization ) - set_target_properties(${GTSAM_PYTHON_UNSTABLE_TARGET} PROPERTIES - INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/lib" - INSTALL_RPATH_USE_LINK_PATH TRUE - OUTPUT_NAME "${GTSAM_UNSTABLE_OUTPUT_NAME}" - LIBRARY_OUTPUT_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam_unstable" - DEBUG_POSTFIX "" # Otherwise you will have a wrong name - RELWITHDEBINFO_POSTFIX "" # Otherwise you will have a wrong name - TIMING_POSTFIX "" # Otherwise you will have a wrong name - PROFILING_POSTFIX "" # Otherwise you will have a wrong name - ) + SET_PYTHON_TARGET_PROPERTIES(${GTSAM_PYTHON_UNSTABLE_TARGET} ${GTSAM_UNSTABLE_OUTPUT_NAME} "${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam_unstable") set(GTSAM_UNSTABLE_MODULE_PATH ${GTSAM_PYTHON_BUILD_DIRECTORY}/gtsam_unstable) From 95098e0cade22f672f3b426099feb91c2a1863d7 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 26 Sep 2024 18:51:30 -0400 Subject: [PATCH 355/398] add missing preprocessor guard --- gtsam/sfm/TranslationFactor.h | 2 ++ 
1 file changed, 2 insertions(+) diff --git a/gtsam/sfm/TranslationFactor.h b/gtsam/sfm/TranslationFactor.h index 9af3b184e..33bc82f5a 100644 --- a/gtsam/sfm/TranslationFactor.h +++ b/gtsam/sfm/TranslationFactor.h @@ -149,11 +149,13 @@ class BilinearAngleTranslationFactor } private: +#ifdef GTSAM_ENABLE_BOOST_SERIALIZATION friend class boost::serialization::access; template void serialize(ARCHIVE& ar, const unsigned int /*version*/) { ar& boost::serialization::make_nvp( "Base", boost::serialization::base_object(*this)); } +#endif }; // \ BilinearAngleTranslationFactor } // namespace gtsam From 71d5a6c1f14e2a053ac6fd48099577459efb079c Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 16:28:56 -0700 Subject: [PATCH 356/398] Fix more wrapper tests --- python/gtsam/tests/test_HybridBayesNet.py | 5 ++--- python/gtsam/tests/test_HybridFactorGraph.py | 4 ++-- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/python/gtsam/tests/test_HybridBayesNet.py b/python/gtsam/tests/test_HybridBayesNet.py index bf2b6a033..57346d4d4 100644 --- a/python/gtsam/tests/test_HybridBayesNet.py +++ b/python/gtsam/tests/test_HybridBayesNet.py @@ -17,7 +17,7 @@ import numpy as np from gtsam.symbol_shorthand import A, X from gtsam.utils.test_case import GtsamTestCase -from gtsam import (DiscreteConditional, DiscreteKeys, DiscreteValues, +from gtsam import (DiscreteConditional, DiscreteValues, GaussianConditional, HybridBayesNet, HybridGaussianConditional, HybridValues, VectorValues, noiseModel) @@ -48,8 +48,7 @@ class TestHybridBayesNet(GtsamTestCase): bayesNet = HybridBayesNet() bayesNet.push_back(conditional) bayesNet.push_back( - HybridGaussianConditional([X(1)], [], Asia, - [conditional0, conditional1])) + HybridGaussianConditional(Asia, [conditional0, conditional1])) bayesNet.push_back(DiscreteConditional(Asia, "99/1")) # Create values at which to evaluate. 
diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index 3c63b9154..6d609deb0 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -35,7 +35,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model) jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model) - gmf = HybridGaussianFactor([X(0)], (C(0), 2), [(jf1, 0), (jf2, 0)]) + gmf = HybridGaussianFactor((C(0), 2), [(jf1, 0), (jf2, 0)]) hfg = HybridGaussianFactorGraph() hfg.push_back(jf1) @@ -60,7 +60,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model) jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model) - gmf = HybridGaussianFactor([X(0)], (C(0), 2), [(jf1, 0), (jf2, 0)]) + gmf = HybridGaussianFactor((C(0), 2), [(jf1, 0), (jf2, 0)]) hfg = HybridGaussianFactorGraph() hfg.push_back(jf1) From 3e6227f2b520452d60b26854ac0f633cf4e4a06b Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 16:47:07 -0700 Subject: [PATCH 357/398] Move code to cpp file. 
--- gtsam/hybrid/HybridGaussianConditional.cpp | 49 ++++++---- gtsam/hybrid/HybridGaussianConditional.h | 14 +-- gtsam/hybrid/HybridGaussianFactor.cpp | 105 +++++++++++++-------- gtsam/hybrid/HybridGaussianFactor.h | 30 +----- gtsam/hybrid/HybridNonlinearFactor.cpp | 93 ++++++++++-------- gtsam/hybrid/HybridNonlinearFactor.h | 32 ++----- 6 files changed, 166 insertions(+), 157 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index f3a699094..87dff7e59 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -29,34 +29,47 @@ namespace gtsam { /* *******************************************************************************/ -HybridGaussianConditional::ConstructorHelper::ConstructorHelper( - const HybridGaussianConditional::Conditionals &conditionals) { - negLogConstant = std::numeric_limits::infinity(); +struct HybridGaussianConditional::ConstructorHelper { + KeyVector frontals, parents; + HybridGaussianFactor::FactorValuePairs pairs; + double negLogConstant; + /// Compute all variables needed for the private constructor below. 
+ ConstructorHelper(const Conditionals &conditionals) { + negLogConstant = std::numeric_limits::infinity(); - auto func = - [&](const GaussianConditional::shared_ptr &c) -> GaussianFactorValuePair { - double value = 0.0; - if (c) { - if (frontals.empty()) { - frontals = KeyVector(c->frontals().begin(), c->frontals().end()); - parents = KeyVector(c->parents().begin(), c->parents().end()); + auto func = [&](const GaussianConditional::shared_ptr &c) + -> GaussianFactorValuePair { + double value = 0.0; + if (c) { + if (frontals.empty()) { + frontals = KeyVector(c->frontals().begin(), c->frontals().end()); + parents = KeyVector(c->parents().begin(), c->parents().end()); + } + value = c->negLogConstant(); + negLogConstant = std::min(negLogConstant, value); } - value = c->negLogConstant(); - negLogConstant = std::min(negLogConstant, value); - } - return {std::dynamic_pointer_cast(c), value}; - }; - pairs = HybridGaussianFactor::FactorValuePairs(conditionals, func); -} + return {std::dynamic_pointer_cast(c), value}; + }; + pairs = HybridGaussianFactor::FactorValuePairs(conditionals, func); + } +}; /* *******************************************************************************/ +HybridGaussianConditional::HybridGaussianConditional( + const DiscreteKeys &discreteParents, + const HybridGaussianConditional::Conditionals &conditionals, + const ConstructorHelper &helper) + : BaseFactor(discreteParents, helper.pairs), + BaseConditional(helper.frontals.size()), + conditionals_(conditionals), + negLogConstant_(helper.negLogConstant) {} + HybridGaussianConditional::HybridGaussianConditional( const DiscreteKeys &discreteParents, const HybridGaussianConditional::Conditionals &conditionals) : HybridGaussianConditional(discreteParents, conditionals, ConstructorHelper(conditionals)) {} -/* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( const DiscreteKey &discreteParent, const std::vector 
&conditionals) diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 15ef43173..b7e2ff024 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -185,23 +185,13 @@ class GTSAM_EXPORT HybridGaussianConditional private: /// Helper struct for private constructor. - struct ConstructorHelper { - KeyVector frontals, parents; - HybridGaussianFactor::FactorValuePairs pairs; - double negLogConstant; - /// Compute all variables needed for the private constructor below. - ConstructorHelper(const Conditionals &conditionals); - }; + struct ConstructorHelper; /// Private constructor that uses helper struct above. HybridGaussianConditional( const DiscreteKeys &discreteParents, const HybridGaussianConditional::Conditionals &conditionals, - const ConstructorHelper &helper) - : BaseFactor(discreteParents, helper.pairs), - BaseConditional(helper.frontals.size()), - conditionals_(conditionals), - negLogConstant_(helper.negLogConstant) {} + const ConstructorHelper &helper); /// Convert to a DecisionTree of Gaussian factor graphs. 
GaussianFactorGraphTree asGaussianFactorGraphTree() const; diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 938a79b19..3672eb2e2 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -70,53 +70,76 @@ HybridGaussianFactor::Factors HybridGaussianFactor::augment( } /* *******************************************************************************/ -HybridGaussianFactor::ConstructorHelper::ConstructorHelper( +struct HybridGaussianFactor::ConstructorHelper { + KeyVector continuousKeys; // Continuous keys extracted from factors + DiscreteKeys discreteKeys; // Discrete keys provided to the constructors + FactorValuePairs pairs; // Used only if factorsTree is empty + Factors factorsTree; + + ConstructorHelper(const DiscreteKey &discreteKey, + const std::vector &factors) + : discreteKeys({discreteKey}) { + // Extract continuous keys from the first non-null factor + for (const auto &factor : factors) { + if (factor && continuousKeys.empty()) { + continuousKeys = factor->keys(); + break; + } + } + + // Build the DecisionTree from the factor vector + factorsTree = Factors(discreteKeys, factors); + } + + ConstructorHelper(const DiscreteKey &discreteKey, + const std::vector &factorPairs) + : discreteKeys({discreteKey}) { + // Extract continuous keys from the first non-null factor + for (const auto &pair : factorPairs) { + if (pair.first && continuousKeys.empty()) { + continuousKeys = pair.first->keys(); + break; + } + } + + // Build the FactorValuePairs DecisionTree + pairs = FactorValuePairs(discreteKeys, factorPairs); + } + + ConstructorHelper(const DiscreteKeys &discreteKeys, + const FactorValuePairs &factorPairs) + : discreteKeys(discreteKeys) { + // Extract continuous keys from the first non-null factor + factorPairs.visit([&](const GaussianFactorValuePair &pair) { + if (pair.first && continuousKeys.empty()) { + continuousKeys = pair.first->keys(); + } + }); + + // Build the 
FactorValuePairs DecisionTree + pairs = factorPairs; + } +}; + +/* *******************************************************************************/ +HybridGaussianFactor::HybridGaussianFactor(const ConstructorHelper &helper) + : Base(helper.continuousKeys, helper.discreteKeys), + factors_(helper.factorsTree.empty() ? augment(helper.pairs) + : helper.factorsTree) {} + +HybridGaussianFactor::HybridGaussianFactor( const DiscreteKey &discreteKey, const std::vector &factors) - : discreteKeys({discreteKey}) { - // Extract continuous keys from the first non-null factor - for (const auto &factor : factors) { - if (factor && continuousKeys.empty()) { - continuousKeys = factor->keys(); - break; - } - } + : HybridGaussianFactor(ConstructorHelper(discreteKey, factors)) {} - // Build the DecisionTree from the factor vector - factorsTree = Factors(discreteKeys, factors); -} - -/* *******************************************************************************/ -HybridGaussianFactor::ConstructorHelper::ConstructorHelper( +HybridGaussianFactor::HybridGaussianFactor( const DiscreteKey &discreteKey, const std::vector &factorPairs) - : discreteKeys({discreteKey}) { - // Extract continuous keys from the first non-null factor - for (const auto &pair : factorPairs) { - if (pair.first && continuousKeys.empty()) { - continuousKeys = pair.first->keys(); - break; - } - } + : HybridGaussianFactor(ConstructorHelper(discreteKey, factorPairs)) {} - // Build the FactorValuePairs DecisionTree - pairs = FactorValuePairs(discreteKeys, factorPairs); -} - -/* *******************************************************************************/ -HybridGaussianFactor::ConstructorHelper::ConstructorHelper( - const DiscreteKeys &discreteKeys, const FactorValuePairs &factorPairs) - : discreteKeys(discreteKeys) { - // Extract continuous keys from the first non-null factor - factorPairs.visit([&](const GaussianFactorValuePair &pair) { - if (pair.first && continuousKeys.empty()) { - continuousKeys = 
pair.first->keys(); - } - }); - - // Build the FactorValuePairs DecisionTree - pairs = factorPairs; -} +HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys &discreteKeys, + const FactorValuePairs &factors) + : HybridGaussianFactor(ConstructorHelper(discreteKeys, factors)) {} /* *******************************************************************************/ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 46b21b8aa..972ee47ba 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -89,8 +89,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * @param factors Vector of gaussian factors, one for each mode. */ HybridGaussianFactor(const DiscreteKey &discreteKey, - const std::vector &factors) - : HybridGaussianFactor(ConstructorHelper(discreteKey, factors)) {} + const std::vector &factors); /** * @brief Construct a new HybridGaussianFactor on a single discrete key, @@ -102,8 +101,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * @param factorPairs Vector of gaussian factor-scalar pairs, one per mode. */ HybridGaussianFactor(const DiscreteKey &discreteKey, - const std::vector &factorPairs) - : HybridGaussianFactor(ConstructorHelper(discreteKey, factorPairs)) {} + const std::vector &factorPairs); /** * @brief Construct a new HybridGaussianFactor on a several discrete keys M, @@ -115,8 +113,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * @param factors The decision tree of Gaussian factor/scalar pairs. 
*/ HybridGaussianFactor(const DiscreteKeys &discreteKeys, - const FactorValuePairs &factors) - : HybridGaussianFactor(ConstructorHelper(discreteKeys, factors)) {} + const FactorValuePairs &factors); /// @} /// @name Testable @@ -197,27 +194,10 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { const sharedFactor &gf, const VectorValues &continuousValues) const; /// Helper struct to assist private constructor below. - struct ConstructorHelper { - KeyVector continuousKeys; // Continuous keys extracted from factors - DiscreteKeys discreteKeys; // Discrete keys provided to the constructors - FactorValuePairs pairs; // Used only if factorsTree is empty - Factors factorsTree; - - ConstructorHelper(const DiscreteKey &discreteKey, - const std::vector &factors); - - ConstructorHelper(const DiscreteKey &discreteKey, - const std::vector &factorPairs); - - ConstructorHelper(const DiscreteKeys &discreteKeys, - const FactorValuePairs &factorPairs); - }; + struct ConstructorHelper; // Private constructor using ConstructorHelper above. - HybridGaussianFactor(const ConstructorHelper &helper) - : Base(helper.continuousKeys, helper.discreteKeys), - factors_(helper.factorsTree.empty() ? 
augment(helper.pairs) - : helper.factorsTree) {} + HybridGaussianFactor(const ConstructorHelper &helper); #ifdef GTSAM_ENABLE_BOOST_SERIALIZATION /** Serialization function */ diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp index 351a7fea4..ac7e943b4 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.cpp +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -25,52 +25,71 @@ namespace gtsam { /* *******************************************************************************/ -static void CopyOrCheckContinuousKeys(const NonlinearFactor::shared_ptr& factor, - KeyVector* continuousKeys) { - if (!factor) return; - if (continuousKeys->empty()) { - *continuousKeys = factor->keys(); - } else if (factor->keys() != *continuousKeys) { - throw std::runtime_error( - "HybridNonlinearFactor: all factors should have the same keys!"); +struct HybridNonlinearFactor::ConstructorHelper { + KeyVector continuousKeys; // Continuous keys extracted from factors + DiscreteKeys discreteKeys; // Discrete keys provided to the constructors + FactorValuePairs factorTree; + + void copyOrCheckContinuousKeys(const NonlinearFactor::shared_ptr& factor) { + if (!factor) return; + if (continuousKeys.empty()) { + continuousKeys = factor->keys(); + } else if (factor->keys() != continuousKeys) { + throw std::runtime_error( + "HybridNonlinearFactor: all factors should have the same keys!"); + } } -} + + ConstructorHelper(const DiscreteKey& discreteKey, + const std::vector& factors) + : discreteKeys({discreteKey}) { + std::vector pairs; + // Extract continuous keys from the first non-null factor + for (const auto& factor : factors) { + pairs.emplace_back(factor, 0.0); + copyOrCheckContinuousKeys(factor); + } + factorTree = FactorValuePairs({discreteKey}, pairs); + } + + ConstructorHelper(const DiscreteKey& discreteKey, + const std::vector& pairs) + : discreteKeys({discreteKey}) { + // Extract continuous keys from the first non-null factor + for (const auto& pair : pairs) 
{ + copyOrCheckContinuousKeys(pair.first); + } + factorTree = FactorValuePairs({discreteKey}, pairs); + } + + ConstructorHelper(const DiscreteKeys& discreteKeys, + const FactorValuePairs& factorPairs) + : discreteKeys(discreteKeys), factorTree(factorPairs) { + // Extract continuous keys from the first non-null factor + factorPairs.visit([&](const NonlinearFactorValuePair& pair) { + copyOrCheckContinuousKeys(pair.first); + }); + } +}; /* *******************************************************************************/ -HybridNonlinearFactor::ConstructorHelper::ConstructorHelper( +HybridNonlinearFactor::HybridNonlinearFactor(const ConstructorHelper& helper) + : Base(helper.continuousKeys, helper.discreteKeys), + factors_(helper.factorTree) {} + +HybridNonlinearFactor::HybridNonlinearFactor( const DiscreteKey& discreteKey, const std::vector& factors) - : discreteKeys({discreteKey}) { - std::vector pairs; - // Extract continuous keys from the first non-null factor - for (const auto& factor : factors) { - pairs.emplace_back(factor, 0.0); - CopyOrCheckContinuousKeys(factor, &continuousKeys); - } - factorTree = FactorValuePairs({discreteKey}, pairs); -} + : HybridNonlinearFactor(ConstructorHelper(discreteKey, factors)) {} -/* *******************************************************************************/ -HybridNonlinearFactor::ConstructorHelper::ConstructorHelper( +HybridNonlinearFactor::HybridNonlinearFactor( const DiscreteKey& discreteKey, const std::vector& pairs) - : discreteKeys({discreteKey}) { - // Extract continuous keys from the first non-null factor - for (const auto& pair : pairs) { - CopyOrCheckContinuousKeys(pair.first, &continuousKeys); - } - factorTree = FactorValuePairs({discreteKey}, pairs); -} + : HybridNonlinearFactor(ConstructorHelper(discreteKey, pairs)) {} -/* *******************************************************************************/ -HybridNonlinearFactor::ConstructorHelper::ConstructorHelper( - const DiscreteKeys& discreteKeys, const 
FactorValuePairs& factorPairs) - : discreteKeys(discreteKeys), factorTree(factorPairs) { - // Extract continuous keys from the first non-null factor - factorPairs.visit([&](const NonlinearFactorValuePair& pair) { - CopyOrCheckContinuousKeys(pair.first, &continuousKeys); - }); -} +HybridNonlinearFactor::HybridNonlinearFactor(const DiscreteKeys& discreteKeys, + const FactorValuePairs& factors) + : HybridNonlinearFactor(ConstructorHelper(discreteKeys, factors)) {} /* *******************************************************************************/ AlgebraicDecisionTree HybridNonlinearFactor::errorTree( diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 161a7b357..a3b6ad4d9 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -93,9 +93,9 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor { * @param discreteKey The discrete key for the "mode", indexing components. * @param factors Vector of gaussian factors, one for each mode. */ - HybridNonlinearFactor(const DiscreteKey& discreteKey, - const std::vector& factors) - : HybridNonlinearFactor(ConstructorHelper(discreteKey, factors)) {} + HybridNonlinearFactor( + const DiscreteKey& discreteKey, + const std::vector& factors); /** * @brief Construct a new HybridNonlinearFactor on a single discrete key, @@ -107,8 +107,7 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor { * @param pairs Vector of gaussian factor-scalar pairs, one per mode. */ HybridNonlinearFactor(const DiscreteKey& discreteKey, - const std::vector& pairs) - : HybridNonlinearFactor(ConstructorHelper(discreteKey, pairs)) {} + const std::vector& pairs); /** * @brief Construct a new HybridNonlinearFactor on a several discrete keys M, @@ -120,8 +119,8 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor { * @param factors The decision tree of nonlinear factor/scalar pairs. 
*/ HybridNonlinearFactor(const DiscreteKeys& discreteKeys, - const FactorValuePairs& factors) - : HybridNonlinearFactor(ConstructorHelper(discreteKeys, factors)) {} + const FactorValuePairs& factors); + /** * @brief Compute error of the HybridNonlinearFactor as a tree. * @@ -181,25 +180,10 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor { private: /// Helper struct to assist private constructor below. - struct ConstructorHelper { - KeyVector continuousKeys; // Continuous keys extracted from factors - DiscreteKeys discreteKeys; // Discrete keys provided to the constructors - FactorValuePairs factorTree; - - ConstructorHelper(const DiscreteKey& discreteKey, - const std::vector& factors); - - ConstructorHelper(const DiscreteKey& discreteKey, - const std::vector& factorPairs); - - ConstructorHelper(const DiscreteKeys& discreteKeys, - const FactorValuePairs& factorPairs); - }; + struct ConstructorHelper; // Private constructor using ConstructorHelper above. - HybridNonlinearFactor(const ConstructorHelper& helper) - : Base(helper.continuousKeys, helper.discreteKeys), - factors_(helper.factorTree) {} + HybridNonlinearFactor(const ConstructorHelper& helper); }; // traits From d93ebeafde1c7ce27c5093d63bdbce2180b3f4c6 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 26 Sep 2024 17:23:44 -0700 Subject: [PATCH 358/398] Yet another python test --- python/gtsam/tests/test_HybridNonlinearFactorGraph.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py index 4a1abdcf3..1880c18f2 100644 --- a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py +++ b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py @@ -14,8 +14,6 @@ from __future__ import print_function import unittest -import numpy as np -from gtsam.symbol_shorthand import C, X from gtsam.utils.test_case import GtsamTestCase import gtsam @@ -38,8 +36,9 @@ class 
TestHybridGaussianFactorGraph(GtsamTestCase): noiseModel.Unit.Create(3)), 0.0), (PriorFactorPoint3(1, Point3(1, 2, 1), noiseModel.Unit.Create(3)), 0.0)] - nlfg.push_back(gtsam.HybridNonlinearFactor([1], (10, 2), factors)) - nlfg.push_back(gtsam.DecisionTreeFactor((10, 2), "1 3")) + mode = (10, 2) + nlfg.push_back(gtsam.HybridNonlinearFactor(mode, factors)) + nlfg.push_back(gtsam.DecisionTreeFactor(mode, "1 3")) values = gtsam.Values() values.insert_point3(1, Point3(0, 0, 0)) values.insert_point3(2, Point3(2, 3, 1)) From 14ad127094e8b0638bec2baeaa6f4161050998d0 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Fri, 27 Sep 2024 06:48:23 -0700 Subject: [PATCH 359/398] Review comments --- gtsam/hybrid/HybridGaussianFactor.cpp | 3 +-- gtsam/hybrid/HybridGaussianFactor.h | 2 +- gtsam/hybrid/HybridNonlinearFactor.cpp | 3 +-- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 3672eb2e2..6f33c5328 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -21,13 +21,12 @@ #include #include #include +#include #include #include #include #include -#include "gtsam/hybrid/HybridFactor.h" - namespace gtsam { /* *******************************************************************************/ diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 972ee47ba..f23d065b6 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -69,7 +69,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// typedef for Decision Tree of Gaussian factors. using Factors = DecisionTree; - protected: + private: /// Decision tree of Gaussian factors indexed by discrete keys. 
Factors factors_; diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp index ac7e943b4..7b63c0dff 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.cpp +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -17,11 +17,10 @@ */ #include +#include #include -#include "gtsam/nonlinear/NonlinearFactor.h" - namespace gtsam { /* *******************************************************************************/ From bc555aef0a0a6e152c92a85a88635d8d97bb413c Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Fri, 27 Sep 2024 06:49:07 -0700 Subject: [PATCH 360/398] Changed order --- gtsam/hybrid/HybridGaussianConditional.h | 26 +++++++++++------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index b7e2ff024..e0bf66e52 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -77,7 +77,18 @@ class GTSAM_EXPORT HybridGaussianConditional HybridGaussianConditional() = default; /** - * @brief Construct a new HybridGaussianConditional object. + * @brief Construct from one discrete key and vector of conditionals. + * + * @param discreteParent Single discrete parent variable + * @param conditionals Vector of conditionals with the same size as the + * cardinality of the discrete parent. + */ + HybridGaussianConditional( + const DiscreteKey &discreteParent, + const std::vector &conditionals); + + /** + * @brief Construct from multiple discrete keys and conditional tree. * * @param discreteParents the discrete parents. Will be placed last. * @param conditionals a decision tree of GaussianConditionals. The number of @@ -88,19 +99,6 @@ class GTSAM_EXPORT HybridGaussianConditional HybridGaussianConditional(const DiscreteKeys &discreteParents, const Conditionals &conditionals); - /** - * @brief Make a Hybrid Gaussian Conditional from - * a vector of Gaussian conditionals. 
- * The DecisionTree-based constructor is preferred over this one. - * - * @param discreteParent Single discrete parent variable - * @param conditionals Vector of conditionals with the same size as the - * cardinality of the discrete parent. - */ - HybridGaussianConditional( - const DiscreteKey &discreteParent, - const std::vector &conditionals); - /// @} /// @name Testable /// @{ From 2b26b3c971b613226054e2f00668c3b3271b78c5 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Fri, 27 Sep 2024 06:49:36 -0700 Subject: [PATCH 361/398] Major improvement -> no need to copy keys anymore --- gtsam/hybrid/HybridGaussianConditional.cpp | 29 +++++++++++++--------- 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 87dff7e59..6bd859c1d 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -27,30 +27,35 @@ #include #include +#include + namespace gtsam { /* *******************************************************************************/ struct HybridGaussianConditional::ConstructorHelper { - KeyVector frontals, parents; + std::optional nrFrontals; HybridGaussianFactor::FactorValuePairs pairs; - double negLogConstant; - /// Compute all variables needed for the private constructor below. - ConstructorHelper(const Conditionals &conditionals) { - negLogConstant = std::numeric_limits::infinity(); + double minNegLogConstant; - auto func = [&](const GaussianConditional::shared_ptr &c) + /// Compute all variables needed for the private constructor below. 
+ ConstructorHelper(const Conditionals &conditionals) + : minNegLogConstant(std::numeric_limits::infinity()) { + auto func = [this](const GaussianConditional::shared_ptr &c) -> GaussianFactorValuePair { double value = 0.0; if (c) { - if (frontals.empty()) { - frontals = KeyVector(c->frontals().begin(), c->frontals().end()); - parents = KeyVector(c->parents().begin(), c->parents().end()); + if (!nrFrontals.has_value()) { + nrFrontals = c->nrFrontals(); } value = c->negLogConstant(); - negLogConstant = std::min(negLogConstant, value); + minNegLogConstant = std::min(minNegLogConstant, value); } return {std::dynamic_pointer_cast(c), value}; }; pairs = HybridGaussianFactor::FactorValuePairs(conditionals, func); + if (!nrFrontals.has_value()) { + throw std::runtime_error( + "HybridGaussianConditional: need at least one frontal variable."); + } } }; @@ -60,9 +65,9 @@ HybridGaussianConditional::HybridGaussianConditional( const HybridGaussianConditional::Conditionals &conditionals, const ConstructorHelper &helper) : BaseFactor(discreteParents, helper.pairs), - BaseConditional(helper.frontals.size()), + BaseConditional(*helper.nrFrontals), conditionals_(conditionals), - negLogConstant_(helper.negLogConstant) {} + negLogConstant_(helper.minNegLogConstant) {} HybridGaussianConditional::HybridGaussianConditional( const DiscreteKeys &discreteParents, From 7d51e1cdb4d1786757dd447e1d1dc8c3fb16327e Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Fri, 27 Sep 2024 07:54:14 -0700 Subject: [PATCH 362/398] tiny improvement --- gtsam/hybrid/HybridFactorGraph.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridFactorGraph.cpp b/gtsam/hybrid/HybridFactorGraph.cpp index f5a7bcdfe..a395e1701 100644 --- a/gtsam/hybrid/HybridFactorGraph.cpp +++ b/gtsam/hybrid/HybridFactorGraph.cpp @@ -29,8 +29,7 @@ std::set HybridFactorGraph::discreteKeys() const { for (const DiscreteKey& key : p->discreteKeys()) { keys.insert(key); } - } - if (auto p = 
std::dynamic_pointer_cast(factor)) { + } else if (auto p = std::dynamic_pointer_cast(factor)) { for (const DiscreteKey& key : p->discreteKeys()) { keys.insert(key); } From bc25fcea4db0746da18d061b397ae89f92b908c8 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Fri, 27 Sep 2024 07:54:47 -0700 Subject: [PATCH 363/398] major improvement: continuousSeparator no longer needed --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 26 +++++++++------------- 1 file changed, 10 insertions(+), 16 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index b519b3a0a..7ab7893cb 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -369,7 +369,6 @@ static std::shared_ptr createHybridGaussianFactor( static std::pair> hybridElimination(const HybridGaussianFactorGraph &factors, const Ordering &frontalKeys, - const KeyVector &continuousSeparator, const std::set &discreteSeparatorSet) { // NOTE: since we use the special JunctionTree, // only possibility is continuous conditioned on discrete. @@ -386,13 +385,18 @@ hybridElimination(const HybridGaussianFactorGraph &factors, factorGraphTree = removeEmpty(factorGraphTree); // This is the elimination method on the leaf nodes + bool someContinuousLeft = false; auto eliminate = [&](const GaussianFactorGraph &graph) -> Result { if (graph.empty()) { return {nullptr, nullptr}; } + // Expensive elimination of product factor. auto result = EliminatePreferCholesky(graph, frontalKeys); + // Record whether there any continuous variables left + someContinuousLeft |= !result.second->empty(); + return result; }; @@ -403,9 +407,9 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // error for each discrete choice. Otherwise, create a HybridGaussianFactor // on the separator, taking care to correct for conditional constants. auto newFactor = - continuousSeparator.empty() - ? 
createDiscreteFactor(eliminationResults, discreteSeparator) - : createHybridGaussianFactor(eliminationResults, discreteSeparator); + someContinuousLeft + ? createHybridGaussianFactor(eliminationResults, discreteSeparator) + : createDiscreteFactor(eliminationResults, discreteSeparator); // Create the HybridGaussianConditional from the conditionals HybridGaussianConditional::Conditionals conditionals( @@ -514,22 +518,12 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, // Case 3: We are now in the hybrid land! KeySet frontalKeysSet(frontalKeys.begin(), frontalKeys.end()); - // Find all the keys in the set of continuous keys - // which are not in the frontal keys. This is our continuous separator. - KeyVector continuousSeparator; - auto continuousKeySet = factors.continuousKeySet(); - std::set_difference( - continuousKeySet.begin(), continuousKeySet.end(), - frontalKeysSet.begin(), frontalKeysSet.end(), - std::inserter(continuousSeparator, continuousSeparator.begin())); - - // Similarly for the discrete separator. + // Find all discrete keys. // Since we eliminate all continuous variables first, // the discrete separator will be *all* the discrete keys. 
std::set discreteSeparator = factors.discreteKeys(); - return hybridElimination(factors, frontalKeys, continuousSeparator, - discreteSeparator); + return hybridElimination(factors, frontalKeys, discreteSeparator); } } From e3e83cd2fb64db36a9e87b895e5e842da547ca60 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 27 Sep 2024 13:43:43 -0400 Subject: [PATCH 364/398] use User-Agent as Other --- .github/workflows/build-python.yml | 7 ++++++- .github/workflows/build-windows.yml | 10 ++++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build-python.yml b/.github/workflows/build-python.yml index 57b4435af..f581a5974 100644 --- a/.github/workflows/build-python.yml +++ b/.github/workflows/build-python.yml @@ -138,7 +138,12 @@ jobs: # Use the prebuilt binary for Windows $Url = "https://sourceforge.net/projects/boost/files/boost-binaries/$env:BOOST_VERSION/$env:BOOST_EXE-${{matrix.platform}}.exe" - (New-Object System.Net.WebClient).DownloadFile($Url, "$env:TEMP\boost.exe") + + # Create WebClient with appropriate settings and download Boost exe + $wc = New-Object System.Net.Webclient + $wc.Headers.Add("User-Agent: Other"); + $wc.DownloadFile($Url, "$env:TEMP\boost.exe") + Start-Process -Wait -FilePath "$env:TEMP\boost.exe" "/SILENT","/SP-","/SUPPRESSMSGBOXES","/DIR=$BOOST_PATH" # Set the BOOST_ROOT variable diff --git a/.github/workflows/build-windows.yml b/.github/workflows/build-windows.yml index 1bda7e40a..20b4a846f 100644 --- a/.github/workflows/build-windows.yml +++ b/.github/workflows/build-windows.yml @@ -70,9 +70,6 @@ jobs: } if ("${{ matrix.compiler }}" -eq "gcc") { - # Chocolatey GCC is broken on the windows-2019 image. 
- # See: https://github.com/DaanDeMeyer/doctest/runs/231595515 - # See: https://github.community/t5/GitHub-Actions/Something-is-wrong-with-the-chocolatey-installed-version-of-gcc/td-p/32413 scoop install gcc --global echo "CC=gcc" >> $GITHUB_ENV echo "CXX=g++" >> $GITHUB_ENV @@ -98,7 +95,12 @@ jobs: # Use the prebuilt binary for Windows $Url = "https://sourceforge.net/projects/boost/files/boost-binaries/$env:BOOST_VERSION/$env:BOOST_EXE-${{matrix.platform}}.exe" - (New-Object System.Net.WebClient).DownloadFile($Url, "$env:TEMP\boost.exe") + + # Create WebClient with appropriate settings and download Boost exe + $wc = New-Object System.Net.Webclient + $wc.Headers.Add("User-Agent: Other"); + $wc.DownloadFile($Url, "$env:TEMP\boost.exe") + Start-Process -Wait -FilePath "$env:TEMP\boost.exe" "/SILENT","/SP-","/SUPPRESSMSGBOXES","/DIR=$BOOST_PATH" # Set the BOOST_ROOT variable From 71655cc3cf5851d9d812b4962daa4cedd89739d9 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Fri, 27 Sep 2024 16:18:37 -0700 Subject: [PATCH 365/398] Pulled out hybrid motion model tests --- .../hybrid/tests/testHybridGaussianFactor.cpp | 347 ---------------- gtsam/hybrid/tests/testHybridMotionModel.cpp | 385 ++++++++++++++++++ 2 files changed, 385 insertions(+), 347 deletions(-) create mode 100644 gtsam/hybrid/tests/testHybridMotionModel.cpp diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 080185d88..c2ffe24c8 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -208,354 +208,7 @@ TEST(HybridGaussianFactor, Error) { 4.0, hybridFactor.error({continuousValues, discreteValues}), 1e-9); } -namespace test_two_state_estimation { - -DiscreteKey m1(M(1), 2); - -void addMeasurement(HybridBayesNet &hbn, Key z_key, Key x_key, double sigma) { - auto measurement_model = noiseModel::Isotropic::Sigma(1, sigma); - hbn.emplace_shared(z_key, Vector1(0.0), I_1x1, x_key, - -I_1x1, 
measurement_model); -} - -/// Create hybrid motion model p(x1 | x0, m1) -static HybridGaussianConditional::shared_ptr CreateHybridMotionModel( - double mu0, double mu1, double sigma0, double sigma1) { - auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); - auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); - auto c0 = make_shared(X(1), Vector1(mu0), I_1x1, X(0), - -I_1x1, model0), - c1 = make_shared(X(1), Vector1(mu1), I_1x1, X(0), - -I_1x1, model1); - DiscreteKeys discreteParents{m1}; - return std::make_shared( - discreteParents, HybridGaussianConditional::Conditionals( - discreteParents, std::vector{c0, c1})); -} - -/// Create two state Bayes network with 1 or two measurement models -HybridBayesNet CreateBayesNet( - const HybridGaussianConditional::shared_ptr &hybridMotionModel, - bool add_second_measurement = false) { - HybridBayesNet hbn; - - // Add measurement model p(z0 | x0) - addMeasurement(hbn, Z(0), X(0), 3.0); - - // Optionally add second measurement model p(z1 | x1) - if (add_second_measurement) { - addMeasurement(hbn, Z(1), X(1), 3.0); - } - - // Add hybrid motion model - hbn.push_back(hybridMotionModel); - - // Discrete uniform prior. - hbn.emplace_shared(m1, "50/50"); - - return hbn; -} - -/// Approximate the discrete marginal P(m1) using importance sampling -std::pair approximateDiscreteMarginal( - const HybridBayesNet &hbn, - const HybridGaussianConditional::shared_ptr &hybridMotionModel, - const VectorValues &given, size_t N = 100000) { - /// Create importance sampling network q(x0,x1,m) = p(x1|x0,m1) q(x0) P(m1), - /// using q(x0) = N(z0, sigmaQ) to sample x0. - HybridBayesNet q; - q.push_back(hybridMotionModel); // Add hybrid motion model - q.emplace_shared(GaussianConditional::FromMeanAndStddev( - X(0), given.at(Z(0)), /* sigmaQ = */ 3.0)); // Add proposal q(x0) for x0 - q.emplace_shared(m1, "50/50"); // Discrete prior. 
- - // Do importance sampling - double w0 = 0.0, w1 = 0.0; - std::mt19937_64 rng(42); - for (int i = 0; i < N; i++) { - HybridValues sample = q.sample(&rng); - sample.insert(given); - double weight = hbn.evaluate(sample) / q.evaluate(sample); - (sample.atDiscrete(M(1)) == 0) ? w0 += weight : w1 += weight; - } - double pm1 = w1 / (w0 + w1); - std::cout << "p(m0) = " << 100 * (1.0 - pm1) << std::endl; - std::cout << "p(m1) = " << 100 * pm1 << std::endl; - return {1.0 - pm1, pm1}; -} - -} // namespace test_two_state_estimation - /* ************************************************************************* */ -/** - * Test a model p(z0|x0)p(z1|x1)p(x1|x0,m1)P(m1). - * - * p(x1|x0,m1) has mode-dependent mean but same covariance. - * - * Converting to a factor graph gives us ϕ(x0;z0)ϕ(x1;z1)ϕ(x1,x0,m1)P(m1) - * - * If we only have a measurement on x0, then - * the posterior probability of m1 should be 0.5/0.5. - * Getting a measurement on z1 gives use more information. - */ -TEST(HybridGaussianFactor, TwoStateModel) { - using namespace test_two_state_estimation; - - double mu0 = 1.0, mu1 = 3.0; - double sigma = 0.5; - auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma, sigma); - - // Start with no measurement on x1, only on x0 - const Vector1 z0(0.5); - - VectorValues given; - given.insert(Z(0), z0); - - { - HybridBayesNet hbn = CreateBayesNet(hybridMotionModel); - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - // Since no measurement on x1, we hedge our bets - // Importance sampling run with 100k samples gives 50.051/49.949 - // approximateDiscreteMarginal(hbn, hybridMotionModel, given); - DiscreteConditional expected(m1, "50/50"); - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()))); - } - - { - // If we set z1=4.5 (>> 2.5 which is the halfway point), - // probability of discrete mode should be leaning to m1==1. 
- const Vector1 z1(4.5); - given.insert(Z(1), z1); - - HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - // Since we have a measurement on x1, we get a definite result - // Values taken from an importance sampling run with 100k samples: - // approximateDiscreteMarginal(hbn, hybridMotionModel, given); - DiscreteConditional expected(m1, "44.3854/55.6146"); - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); - } -} - -/* ************************************************************************* */ -/** - * Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). - * - * P(x1|x0,m1) has different means and different covariances. - * - * Converting to a factor graph gives us - * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) - * - * If we only have a measurement on z0, then - * the P(m1) should be 0.5/0.5. - * Getting a measurement on z1 gives use more information. - */ -TEST(HybridGaussianFactor, TwoStateModel2) { - using namespace test_two_state_estimation; - - double mu0 = 1.0, mu1 = 3.0; - double sigma0 = 0.5, sigma1 = 2.0; - auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma0, sigma1); - - // Start with no measurement on x1, only on x0 - const Vector1 z0(0.5); - VectorValues given; - given.insert(Z(0), z0); - - { - HybridBayesNet hbn = CreateBayesNet(hybridMotionModel); - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - - // Check that ratio of Bayes net and factor graph for different modes is - // equal for several values of {x0,x1}. 
- for (VectorValues vv : - {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, - VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { - vv.insert(given); // add measurements for HBN - HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); - EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), - gfg.error(hv1) / hbn.error(hv1), 1e-9); - } - - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - // Importance sampling run with 100k samples gives 50.095/49.905 - // approximateDiscreteMarginal(hbn, hybridMotionModel, given); - - // Since no measurement on x1, we a 50/50 probability - auto p_m = bn->at(2)->asDiscrete(); - EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 0}}), 1e-9); - EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 1}}), 1e-9); - } - - { - // Now we add a measurement z1 on x1 - const Vector1 z1(4.0); // favors m==1 - given.insert(Z(1), z1); - - HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - - // Check that ratio of Bayes net and factor graph for different modes is - // equal for several values of {x0,x1}. 
- for (VectorValues vv : - {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, - VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { - vv.insert(given); // add measurements for HBN - HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); - EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), - gfg.error(hv1) / hbn.error(hv1), 1e-9); - } - - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - // Values taken from an importance sampling run with 100k samples: - // approximateDiscreteMarginal(hbn, hybridMotionModel, given); - DiscreteConditional expected(m1, "48.3158/51.6842"); - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); - } - - { - // Add a different measurement z1 on x1 that favors m==0 - const Vector1 z1(1.1); - given.insert_or_assign(Z(1), z1); - - HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - // Values taken from an importance sampling run with 100k samples: - // approximateDiscreteMarginal(hbn, hybridMotionModel, given); - DiscreteConditional expected(m1, "55.396/44.604"); - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); - } -} - -/* ************************************************************************* */ -/** - * Test a model p(z0|x0)p(x1|x0,m1)p(z1|x1)p(m1). - * - * p(x1|x0,m1) has the same means but different covariances. - * - * Converting to a factor graph gives us - * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)p(m1) - * - * If we only have a measurement on z0, then - * the p(m1) should be 0.5/0.5. - * Getting a measurement on z1 gives use more information. 
- */ -TEST(HybridGaussianFactor, TwoStateModel3) { - using namespace test_two_state_estimation; - - double mu = 1.0; - double sigma0 = 0.5, sigma1 = 2.0; - auto hybridMotionModel = CreateHybridMotionModel(mu, mu, sigma0, sigma1); - - // Start with no measurement on x1, only on x0 - const Vector1 z0(0.5); - VectorValues given; - given.insert(Z(0), z0); - - { - HybridBayesNet hbn = CreateBayesNet(hybridMotionModel); - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - - // Check that ratio of Bayes net and factor graph for different modes is - // equal for several values of {x0,x1}. - for (VectorValues vv : - {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, - VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { - vv.insert(given); // add measurements for HBN - HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); - EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), - gfg.error(hv1) / hbn.error(hv1), 1e-9); - } - - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - // Importance sampling run with 100k samples gives 50.095/49.905 - // approximateDiscreteMarginal(hbn, hybridMotionModel, given); - - // Since no measurement on x1, we a 50/50 probability - auto p_m = bn->at(2)->asDiscrete(); - EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 0}}), 1e-9); - EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 1}}), 1e-9); - } - - { - // Now we add a measurement z1 on x1 - const Vector1 z1(4.0); // favors m==1 - given.insert(Z(1), z1); - - HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - - // Check that ratio of Bayes net and factor graph for different modes is - // equal for several values of {x0,x1}. 
- for (VectorValues vv : - {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, - VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { - vv.insert(given); // add measurements for HBN - HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); - EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), - gfg.error(hv1) / hbn.error(hv1), 1e-9); - } - - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - // Values taken from an importance sampling run with 100k samples: - // approximateDiscreteMarginal(hbn, hybridMotionModel, given); - DiscreteConditional expected(m1, "51.7762/48.2238"); - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); - } - - { - // Add a different measurement z1 on x1 that favors m==1 - const Vector1 z1(7.0); - given.insert_or_assign(Z(1), z1); - - HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - // Values taken from an importance sampling run with 100k samples: - // approximateDiscreteMarginal(hbn, hybridMotionModel, given); - DiscreteConditional expected(m1, "49.0762/50.9238"); - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.005)); - } -} - -/* ************************************************************************* */ -/** - * Same model, P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1), but now with very informative - * measurements and vastly different motion model: either stand still or move - * far. This yields a very informative posterior. 
- */ -TEST(HybridGaussianFactor, TwoStateModel4) { - using namespace test_two_state_estimation; - - double mu0 = 0.0, mu1 = 10.0; - double sigma0 = 0.2, sigma1 = 5.0; - auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma0, sigma1); - - // We only check the 2-measurement case - const Vector1 z0(0.0), z1(10.0); - VectorValues given{{Z(0), z0}, {Z(1), z1}}; - - HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - - // Values taken from an importance sampling run with 100k samples: - // approximateDiscreteMarginal(hbn, hybridMotionModel, given); - DiscreteConditional expected(m1, "8.91527/91.0847"); - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); -} - namespace test_direct_factor_graph { /** * @brief Create a Factor Graph by directly specifying all diff --git a/gtsam/hybrid/tests/testHybridMotionModel.cpp b/gtsam/hybrid/tests/testHybridMotionModel.cpp new file mode 100644 index 000000000..1a2a09e15 --- /dev/null +++ b/gtsam/hybrid/tests/testHybridMotionModel.cpp @@ -0,0 +1,385 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. 
(see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridMotionModel.cpp + * @brief Tests hybrid inference with a simple switching motion model + * @author Varun Agrawal + * @author Fan Jiang + * @author Frank Dellaert + * @date December 2021 + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// Include for test suite +#include + +#include + +using namespace std; +using namespace gtsam; +using symbol_shorthand::M; +using symbol_shorthand::X; +using symbol_shorthand::Z; + +DiscreteKey m1(M(1), 2); + +void addMeasurement(HybridBayesNet &hbn, Key z_key, Key x_key, double sigma) { + auto measurement_model = noiseModel::Isotropic::Sigma(1, sigma); + hbn.emplace_shared(z_key, Vector1(0.0), I_1x1, x_key, + -I_1x1, measurement_model); +} + +/// Create hybrid motion model p(x1 | x0, m1) +static HybridGaussianConditional::shared_ptr CreateHybridMotionModel( + double mu0, double mu1, double sigma0, double sigma1) { + auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); + auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); + auto c0 = make_shared(X(1), Vector1(mu0), I_1x1, X(0), + -I_1x1, model0), + c1 = make_shared(X(1), Vector1(mu1), I_1x1, X(0), + -I_1x1, model1); + return std::make_shared(m1, std::vector{c0, c1}); +} + +/// Create two state Bayes network with 1 or two measurement models +HybridBayesNet CreateBayesNet( + const HybridGaussianConditional::shared_ptr &hybridMotionModel, + bool add_second_measurement = false) { + HybridBayesNet hbn; + + // Add measurement model p(z0 | x0) + addMeasurement(hbn, Z(0), X(0), 3.0); + + // Optionally add second measurement model p(z1 | x1) + if (add_second_measurement) { + addMeasurement(hbn, Z(1), X(1), 3.0); + } + + // Add hybrid motion model + hbn.push_back(hybridMotionModel); + + // Discrete 
uniform prior. + hbn.emplace_shared(m1, "50/50"); + + return hbn; +} + +/// Approximate the discrete marginal P(m1) using importance sampling +std::pair approximateDiscreteMarginal( + const HybridBayesNet &hbn, + const HybridGaussianConditional::shared_ptr &hybridMotionModel, + const VectorValues &given, size_t N = 100000) { + /// Create importance sampling network q(x0,x1,m) = p(x1|x0,m1) q(x0) P(m1), + /// using q(x0) = N(z0, sigmaQ) to sample x0. + HybridBayesNet q; + q.push_back(hybridMotionModel); // Add hybrid motion model + q.emplace_shared(GaussianConditional::FromMeanAndStddev( + X(0), given.at(Z(0)), /* sigmaQ = */ 3.0)); // Add proposal q(x0) for x0 + q.emplace_shared(m1, "50/50"); // Discrete prior. + + // Do importance sampling + double w0 = 0.0, w1 = 0.0; + std::mt19937_64 rng(42); + for (int i = 0; i < N; i++) { + HybridValues sample = q.sample(&rng); + sample.insert(given); + double weight = hbn.evaluate(sample) / q.evaluate(sample); + (sample.atDiscrete(M(1)) == 0) ? w0 += weight : w1 += weight; + } + double pm1 = w1 / (w0 + w1); + std::cout << "p(m0) = " << 100 * (1.0 - pm1) << std::endl; + std::cout << "p(m1) = " << 100 * pm1 << std::endl; + return {1.0 - pm1, pm1}; +} + +/* ************************************************************************* */ +/** + * Test a model p(z0|x0)p(z1|x1)p(x1|x0,m1)P(m1). + * + * p(x1|x0,m1) has mode-dependent mean but same covariance. + * + * Converting to a factor graph gives us ϕ(x0;z0)ϕ(x1;z1)ϕ(x1,x0,m1)P(m1) + * + * If we only have a measurement on x0, then + * the posterior probability of m1 should be 0.5/0.5. + * Getting a measurement on z1 gives use more information. 
+ */ +TEST(HybridGaussianFactor, TwoStateModel) { + double mu0 = 1.0, mu1 = 3.0; + double sigma = 0.5; + auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma, sigma); + + // Start with no measurement on x1, only on x0 + const Vector1 z0(0.5); + + VectorValues given; + given.insert(Z(0), z0); + + { + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Since no measurement on x1, we hedge our bets + // Importance sampling run with 100k samples gives 50.051/49.949 + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + DiscreteConditional expected(m1, "50/50"); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()))); + } + + { + // If we set z1=4.5 (>> 2.5 which is the halfway point), + // probability of discrete mode should be leaning to m1==1. + const Vector1 z1(4.5); + given.insert(Z(1), z1); + + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Since we have a measurement on x1, we get a definite result + // Values taken from an importance sampling run with 100k samples: + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + DiscreteConditional expected(m1, "44.3854/55.6146"); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); + } +} + +/* ************************************************************************* */ +/** + * Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). + * + * P(x1|x0,m1) has different means and different covariances. + * + * Converting to a factor graph gives us + * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) + * + * If we only have a measurement on z0, then + * the P(m1) should be 0.5/0.5. + * Getting a measurement on z1 gives use more information. 
+ */ +TEST(HybridGaussianFactor, TwoStateModel2) { + double mu0 = 1.0, mu1 = 3.0; + double sigma0 = 0.5, sigma1 = 2.0; + auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma0, sigma1); + + // Start with no measurement on x1, only on x0 + const Vector1 z0(0.5); + VectorValues given; + given.insert(Z(0), z0); + + { + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + + // Check that ratio of Bayes net and factor graph for different modes is + // equal for several values of {x0,x1}. + for (VectorValues vv : + {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + vv.insert(given); // add measurements for HBN + HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); + EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), + gfg.error(hv1) / hbn.error(hv1), 1e-9); + } + + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Importance sampling run with 100k samples gives 50.095/49.905 + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + + // Since no measurement on x1, we a 50/50 probability + auto p_m = bn->at(2)->asDiscrete(); + EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 0}}), 1e-9); + EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 1}}), 1e-9); + } + + { + // Now we add a measurement z1 on x1 + const Vector1 z1(4.0); // favors m==1 + given.insert(Z(1), z1); + + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + + // Check that ratio of Bayes net and factor graph for different modes is + // equal for several values of {x0,x1}. 
+ for (VectorValues vv : + {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + vv.insert(given); // add measurements for HBN + HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); + EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), + gfg.error(hv1) / hbn.error(hv1), 1e-9); + } + + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Values taken from an importance sampling run with 100k samples: + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + DiscreteConditional expected(m1, "48.3158/51.6842"); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); + } + + { + // Add a different measurement z1 on x1 that favors m==0 + const Vector1 z1(1.1); + given.insert_or_assign(Z(1), z1); + + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Values taken from an importance sampling run with 100k samples: + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + DiscreteConditional expected(m1, "55.396/44.604"); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); + } +} + +/* ************************************************************************* */ +/** + * Test a model p(z0|x0)p(x1|x0,m1)p(z1|x1)p(m1). + * + * p(x1|x0,m1) has the same means but different covariances. + * + * Converting to a factor graph gives us + * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)p(m1) + * + * If we only have a measurement on z0, then + * the p(m1) should be 0.5/0.5. + * Getting a measurement on z1 gives use more information. 
+ */ +TEST(HybridGaussianFactor, TwoStateModel3) { + double mu = 1.0; + double sigma0 = 0.5, sigma1 = 2.0; + auto hybridMotionModel = CreateHybridMotionModel(mu, mu, sigma0, sigma1); + + // Start with no measurement on x1, only on x0 + const Vector1 z0(0.5); + VectorValues given; + given.insert(Z(0), z0); + + { + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + + // Check that ratio of Bayes net and factor graph for different modes is + // equal for several values of {x0,x1}. + for (VectorValues vv : + {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + vv.insert(given); // add measurements for HBN + HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); + EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), + gfg.error(hv1) / hbn.error(hv1), 1e-9); + } + + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Importance sampling run with 100k samples gives 50.095/49.905 + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + + // Since no measurement on x1, we a 50/50 probability + auto p_m = bn->at(2)->asDiscrete(); + EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 0}}), 1e-9); + EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 1}}), 1e-9); + } + + { + // Now we add a measurement z1 on x1 + const Vector1 z1(4.0); // favors m==1 + given.insert(Z(1), z1); + + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + + // Check that ratio of Bayes net and factor graph for different modes is + // equal for several values of {x0,x1}. 
+ for (VectorValues vv : + {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + vv.insert(given); // add measurements for HBN + HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); + EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), + gfg.error(hv1) / hbn.error(hv1), 1e-9); + } + + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Values taken from an importance sampling run with 100k samples: + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + DiscreteConditional expected(m1, "51.7762/48.2238"); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); + } + + { + // Add a different measurement z1 on x1 that favors m==1 + const Vector1 z1(7.0); + given.insert_or_assign(Z(1), z1); + + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Values taken from an importance sampling run with 100k samples: + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + DiscreteConditional expected(m1, "49.0762/50.9238"); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.005)); + } +} + +/* ************************************************************************* */ +/** + * Same model, P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1), but now with very informative + * measurements and vastly different motion model: either stand still or move + * far. This yields a very informative posterior. 
+ */ +TEST(HybridGaussianFactor, TwoStateModel4) { + double mu0 = 0.0, mu1 = 10.0; + double sigma0 = 0.2, sigma1 = 5.0; + auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma0, sigma1); + + // We only check the 2-measurement case + const Vector1 z0(0.0), z1(10.0); + VectorValues given{{Z(0), z0}, {Z(1), z1}}; + + HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Values taken from an importance sampling run with 100k samples: + // approximateDiscreteMarginal(hbn, hybridMotionModel, given); + DiscreteConditional expected(m1, "8.91527/91.0847"); + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ From 8907ca74438859f49dcbd56d78e338deddc86a2a Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 28 Sep 2024 11:39:58 -0700 Subject: [PATCH 366/398] Switching to NoiseModelFactor --- gtsam/hybrid/HybridNonlinearFactor.cpp | 20 +++++-- gtsam/hybrid/HybridNonlinearFactor.h | 20 +++---- gtsam/hybrid/tests/Switching.h | 3 +- gtsam/hybrid/tests/testHybridBayesNet.cpp | 3 +- gtsam/hybrid/tests/testHybridEstimation.cpp | 58 +++---------------- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 3 +- .../tests/testHybridNonlinearFactorGraph.cpp | 10 ++-- .../hybrid/tests/testHybridNonlinearISAM.cpp | 3 +- 8 files changed, 43 insertions(+), 77 deletions(-) diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp index 7b63c0dff..528b150ac 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.cpp +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -17,6 +17,7 @@ */ #include +#include #include #include @@ -29,7 +30,7 @@ struct 
HybridNonlinearFactor::ConstructorHelper { DiscreteKeys discreteKeys; // Discrete keys provided to the constructors FactorValuePairs factorTree; - void copyOrCheckContinuousKeys(const NonlinearFactor::shared_ptr& factor) { + void copyOrCheckContinuousKeys(const NoiseModelFactor::shared_ptr& factor) { if (!factor) return; if (continuousKeys.empty()) { continuousKeys = factor->keys(); @@ -40,7 +41,7 @@ struct HybridNonlinearFactor::ConstructorHelper { } ConstructorHelper(const DiscreteKey& discreteKey, - const std::vector& factors) + const std::vector& factors) : discreteKeys({discreteKey}) { std::vector pairs; // Extract continuous keys from the first non-null factor @@ -78,7 +79,7 @@ HybridNonlinearFactor::HybridNonlinearFactor(const ConstructorHelper& helper) HybridNonlinearFactor::HybridNonlinearFactor( const DiscreteKey& discreteKey, - const std::vector& factors) + const std::vector& factors) : HybridNonlinearFactor(ConstructorHelper(discreteKey, factors)) {} HybridNonlinearFactor::HybridNonlinearFactor( @@ -158,8 +159,7 @@ bool HybridNonlinearFactor::equals(const HybridFactor& other, // Ensure that this HybridNonlinearFactor and `f` have the same `factors_`. 
auto compare = [tol](const std::pair& a, const std::pair& b) { - return traits::Equals(*a.first, *b.first, tol) && - (a.second == b.second); + return a.first->equals(*b.first, tol) && (a.second == b.second); }; if (!factors_.equals(f.factors_, compare)) return false; @@ -185,7 +185,15 @@ std::shared_ptr HybridNonlinearFactor::linearize( [continuousValues]( const std::pair& f) -> GaussianFactorValuePair { auto [factor, val] = f; - return {factor->linearize(continuousValues), val}; + if (auto gaussian = std::dynamic_pointer_cast( + factor->noiseModel())) { + return {factor->linearize(continuousValues), + val + gaussian->negLogConstant()}; + } else { + throw std::runtime_error( + "HybridNonlinearFactor: linearize() only " + "supports Gaussian factors."); + } }; DecisionTree> diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index a3b6ad4d9..325fa3eaa 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -26,25 +26,23 @@ #include #include -#include -#include -#include #include namespace gtsam { -/// Alias for a NonlinearFactor shared pointer and double scalar pair. -using NonlinearFactorValuePair = std::pair; +/// Alias for a NoiseModelFactor shared pointer and double scalar pair. +using NonlinearFactorValuePair = + std::pair; /** * @brief Implementation of a discrete-conditioned hybrid factor. * * Implements a joint discrete-continuous factor where the discrete variable - * serves to "select" a hybrid component corresponding to a NonlinearFactor. + * serves to "select" a hybrid component corresponding to a NoiseModelFactor. * * This class stores all factors as HybridFactors which can then be typecast to - * one of (NonlinearFactor, GaussianFactor) which can then be checked to perform - * the correct operation. + * one of (NoiseModelFactor, GaussianFactor) which can then be checked to + * perform the correct operation. 
* * In factor graphs the error function typically returns 0.5*|h(x)-z|^2, i.e., * the negative log-likelihood for a Gaussian noise model. @@ -62,11 +60,11 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor { using Base = HybridFactor; using This = HybridNonlinearFactor; using shared_ptr = std::shared_ptr; - using sharedFactor = std::shared_ptr; + using sharedFactor = std::shared_ptr; /** * @brief typedef for DecisionTree which has Keys as node labels and - * pairs of NonlinearFactor & an arbitrary scalar as leaf nodes. + * pairs of NoiseModelFactor & an arbitrary scalar as leaf nodes. */ using FactorValuePairs = DecisionTree; @@ -95,7 +93,7 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor { */ HybridNonlinearFactor( const DiscreteKey& discreteKey, - const std::vector& factors); + const std::vector& factors); /** * @brief Construct a new HybridNonlinearFactor on a single discrete key, diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 23a102fd5..90137a4e3 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -33,6 +33,7 @@ #include "gtsam/linear/GaussianFactor.h" #include "gtsam/linear/GaussianFactorGraph.h" +#include "gtsam/nonlinear/NonlinearFactor.h" #pragma once @@ -185,7 +186,7 @@ struct Switching { } // Create motion models for a given time step - static std::vector motionModels( + static std::vector motionModels( size_t k, double sigma = 1.0) { auto noise_model = noiseModel::Isotropic::Sigma(1, sigma); auto still = diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 8659100ac..769512bed 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -24,6 +24,7 @@ #include "Switching.h" #include "TinyHybridExample.h" +#include "gtsam/nonlinear/NonlinearFactor.h" // Include for test suite #include @@ -389,7 +390,7 @@ TEST(HybridBayesNet, Sampling) { std::make_shared>(X(0), 
X(1), 1, noise_model); nfg.emplace_shared( DiscreteKey(M(0), 2), - std::vector{zero_motion, one_motion}); + std::vector{zero_motion, one_motion}); DiscreteKey mode(M(0), 2); nfg.emplace_shared(mode, "1/1"); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 1b0286084..2a5fb93ba 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -39,6 +39,7 @@ #include #include "Switching.h" +#include "gtsam/nonlinear/NonlinearFactor.h" using namespace std; using namespace gtsam; @@ -435,8 +436,8 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() { std::make_shared>(X(0), X(1), 0, noise_model); const auto one_motion = std::make_shared>(X(0), X(1), 1, noise_model); - std::vector components = {zero_motion, - one_motion}; + std::vector components = {zero_motion, + one_motion}; nfg.emplace_shared(m, components); return nfg; @@ -526,49 +527,6 @@ TEST(HybridEstimation, CorrectnessViaSampling) { } } -/****************************************************************************/ -/** - * Helper function to add the constant term corresponding to - * the difference in noise models. 
- */ -std::shared_ptr mixedVarianceFactor( - const HybridNonlinearFactor& mf, const Values& initial, const Key& mode, - double noise_tight, double noise_loose, size_t d, size_t tight_index) { - HybridGaussianFactor::shared_ptr gmf = mf.linearize(initial); - - constexpr double log2pi = 1.8378770664093454835606594728112; - // logConstant will be of the tighter model - double logNormalizationConstant = log(1.0 / noise_tight); - double logConstant = -0.5 * d * log2pi + logNormalizationConstant; - - auto func = [&](const Assignment& assignment, - const GaussianFactor::shared_ptr& gf) { - if (assignment.at(mode) != tight_index) { - double factor_log_constant = -0.5 * d * log2pi + log(1.0 / noise_loose); - - GaussianFactorGraph _gfg; - _gfg.push_back(gf); - Vector c(d); - for (size_t i = 0; i < d; i++) { - c(i) = std::sqrt(2.0 * (logConstant - factor_log_constant)); - } - - _gfg.emplace_shared(c); - return std::make_shared(_gfg); - } else { - return dynamic_pointer_cast(gf); - } - }; - auto updated_components = gmf->factors().apply(func); - auto updated_pairs = HybridGaussianFactor::FactorValuePairs( - updated_components, - [](const GaussianFactor::shared_ptr& gf) -> GaussianFactorValuePair { - return {gf, 0.0}; - }); - return std::make_shared(gmf->discreteKeys(), - updated_pairs); -} - /****************************************************************************/ TEST(HybridEstimation, ModeSelection) { HybridNonlinearFactorGraph graph; @@ -588,15 +546,14 @@ TEST(HybridEstimation, ModeSelection) { X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_loose)), model1 = std::make_shared( X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight)); - std::vector components = {model0, model1}; + std::vector components = {model0, model1}; HybridNonlinearFactor mf({M(0), 2}, components); initial.insert(X(0), 0.0); initial.insert(X(1), 0.0); - auto gmf = - mixedVarianceFactor(mf, initial, M(0), noise_tight, noise_loose, d, 1); + auto gmf = mf.linearize(initial); 
graph.add(gmf); auto gfg = graph.linearize(initial); @@ -676,15 +633,14 @@ TEST(HybridEstimation, ModeSelection2) { X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_loose)), model1 = std::make_shared>( X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight)); - std::vector components = {model0, model1}; + std::vector components = {model0, model1}; HybridNonlinearFactor mf({M(0), 2}, components); initial.insert(X(0), Z_3x1); initial.insert(X(1), Z_3x1); - auto gmf = - mixedVarianceFactor(mf, initial, M(0), noise_tight, noise_loose, d, 1); + auto gmf = mf.linearize(initial); graph.add(gmf); auto gfg = graph.linearize(initial); diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index 280c6cb53..7572a64e2 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -30,6 +30,7 @@ #include #include "Switching.h" +#include "gtsam/nonlinear/NonlinearFactor.h" // Include for test suite #include @@ -415,7 +416,7 @@ TEST(HybridGaussianISAM, NonTrivial) { // Add odometry factor with discrete modes. 
Pose2 odometry(1.0, 0.0, 0.0); auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); - std::vector components; + std::vector components; components.emplace_back( new PlanarMotionModel(W(0), W(1), odometry, noise_model)); // moving components.emplace_back( diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index f9a546c81..d32312508 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -36,6 +36,7 @@ #include #include "Switching.h" +#include "gtsam/nonlinear/NonlinearFactor.h" // Include for test suite #include @@ -119,7 +120,7 @@ TEST(HybridNonlinearFactorGraph, Resize) { namespace test_motion { gtsam::DiscreteKey m1(M(1), 2); auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0); -std::vector components = { +std::vector components = { std::make_shared(X(0), X(1), 0.0, noise_model), std::make_shared(X(0), X(1), 1.0, noise_model)}; } // namespace test_motion @@ -207,7 +208,7 @@ TEST(HybridNonlinearFactorGraph, PushBack) { factors.emplace_shared>(1, Pose2(1, 0, 0), noise); factors.emplace_shared>(2, Pose2(2, 0, 0), noise); // TODO(Varun) This does not currently work. It should work once HybridFactor - // becomes a base class of NonlinearFactor. + // becomes a base class of NoiseModelFactor. 
// hnfg.push_back(factors.begin(), factors.end()); // EXPECT_LONGS_EQUAL(3, hnfg.size()); @@ -807,7 +808,7 @@ TEST(HybridNonlinearFactorGraph, DefaultDecisionTree) { // Add odometry factor Pose2 odometry(2.0, 0.0, 0.0); auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); - std::vector motion_models = { + std::vector motion_models = { std::make_shared(X(0), X(1), Pose2(0, 0, 0), noise_model), std::make_shared(X(0), X(1), odometry, noise_model)}; @@ -874,8 +875,7 @@ static HybridNonlinearFactorGraph CreateFactorGraph( // Create HybridNonlinearFactor // We take negative since we want // the underlying scalar to be log(\sqrt(|2πΣ|)) - std::vector factors{{f0, model0->negLogConstant()}, - {f1, model1->negLogConstant()}}; + std::vector factors{{f0, 0.0}, {f1, 0.0}}; HybridNonlinearFactor mixtureFactor(m1, factors); diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index 9e93116fc..5a04fc089 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -30,6 +30,7 @@ #include #include "Switching.h" +#include "gtsam/nonlinear/NonlinearFactor.h" // Include for test suite #include @@ -438,7 +439,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { noise_model), moving = std::make_shared(W(0), W(1), odometry, noise_model); - std::vector components{moving, still}; + std::vector components{moving, still}; fg.emplace_shared(DiscreteKey(M(1), 2), components); // Add equivalent of ImuFactor From b2c73af2a2d20b3d573a92b99fddd0d5e9dd30f4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 28 Sep 2024 16:52:35 -0400 Subject: [PATCH 367/398] get keys from NonlinearFactors as well --- gtsam/hybrid/HybridFactorGraph.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/gtsam/hybrid/HybridFactorGraph.cpp b/gtsam/hybrid/HybridFactorGraph.cpp index a395e1701..78c051b17 100644 --- a/gtsam/hybrid/HybridFactorGraph.cpp +++ b/gtsam/hybrid/HybridFactorGraph.cpp @@ -18,6 +18,7 @@ 
*/ #include +#include namespace gtsam { @@ -58,6 +59,8 @@ const KeySet HybridFactorGraph::continuousKeySet() const { } } else if (auto p = std::dynamic_pointer_cast(factor)) { keys.insert(p->keys().begin(), p->keys().end()); + } else if (auto p = std::dynamic_pointer_cast(factor)) { + keys.insert(p->keys().begin(), p->keys().end()); } } return keys; From e3190189c818419fe8d908e6f1b7767e5f56a1cd Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 28 Sep 2024 15:52:51 -0700 Subject: [PATCH 368/398] Fix message --- gtsam/hybrid/HybridNonlinearFactor.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp index 528b150ac..9378d07fe 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.cpp +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -191,8 +191,8 @@ std::shared_ptr HybridNonlinearFactor::linearize( val + gaussian->negLogConstant()}; } else { throw std::runtime_error( - "HybridNonlinearFactor: linearize() only " - "supports Gaussian factors."); + "HybridNonlinearFactor: linearize() only supports NoiseModelFactors " + "with Gaussian (or derived) noise models."); } }; From 3567cbbe0f84957a968824cb11f0defa339809be Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 28 Sep 2024 15:52:57 -0700 Subject: [PATCH 369/398] Fix wrapper --- gtsam/hybrid/hybrid.i | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 9ed96d115..2d59c100f 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -245,16 +245,16 @@ class HybridNonlinearFactorGraph { class HybridNonlinearFactor : gtsam::HybridFactor { HybridNonlinearFactor( const gtsam::DiscreteKey& discreteKey, - const std::vector& factors); + const std::vector& factors); HybridNonlinearFactor( const gtsam::DiscreteKey& discreteKey, - const std::vector>& factors); + const std::vector>& factors); HybridNonlinearFactor( const gtsam::DiscreteKeys& 
discreteKeys, const gtsam::DecisionTree< - gtsam::Key, std::pair>& factors); + gtsam::Key, std::pair>& factors); double error(const gtsam::Values& continuousValues, const gtsam::DiscreteValues& discreteValues) const; From 8904f5f3ff47d1859006f5ce782f72b86258dd42 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 28 Sep 2024 18:54:55 -0400 Subject: [PATCH 370/398] better docstring for prunerFunc and added dividers --- gtsam/hybrid/HybridGaussianConditional.cpp | 9 ++------- gtsam/hybrid/HybridGaussianConditional.h | 8 +++++++- gtsam/hybrid/HybridGaussianFactor.cpp | 4 ++++ 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 6bd859c1d..4e896206f 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -69,12 +69,14 @@ HybridGaussianConditional::HybridGaussianConditional( conditionals_(conditionals), negLogConstant_(helper.minNegLogConstant) {} +/* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( const DiscreteKeys &discreteParents, const HybridGaussianConditional::Conditionals &conditionals) : HybridGaussianConditional(discreteParents, conditionals, ConstructorHelper(conditionals)) {} +/* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( const DiscreteKey &discreteParent, const std::vector &conditionals) @@ -242,13 +244,6 @@ std::set DiscreteKeysAsSet(const DiscreteKeys &discreteKeys) { } /* ************************************************************************* */ -/** - * @brief Helper function to get the pruner functional. - * - * @param discreteProbs The probabilities of only discrete keys. 
- * @return std::function &, const GaussianConditional::shared_ptr &)> - */ std::function &, const GaussianConditional::shared_ptr &)> HybridGaussianConditional::prunerFunc(const DecisionTreeFactor &discreteProbs) { diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index e0bf66e52..02f6bba1e 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -194,7 +194,13 @@ class GTSAM_EXPORT HybridGaussianConditional /// Convert to a DecisionTree of Gaussian factor graphs. GaussianFactorGraphTree asGaussianFactorGraphTree() const; - //// Get the pruner functor from pruned discrete probabilities. + /** + * @brief Get the pruner function from discrete probabilities. + * + * @param discreteProbs The probabilities of only discrete keys. + * @return std::function &, const GaussianConditional::shared_ptr &)> + */ std::function &, const GaussianConditional::shared_ptr &)> prunerFunc(const DecisionTreeFactor &prunedProbabilities); diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 6f33c5328..7f8e808bf 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -126,16 +126,19 @@ HybridGaussianFactor::HybridGaussianFactor(const ConstructorHelper &helper) factors_(helper.factorsTree.empty() ? 
augment(helper.pairs) : helper.factorsTree) {} +/* *******************************************************************************/ HybridGaussianFactor::HybridGaussianFactor( const DiscreteKey &discreteKey, const std::vector &factors) : HybridGaussianFactor(ConstructorHelper(discreteKey, factors)) {} +/* *******************************************************************************/ HybridGaussianFactor::HybridGaussianFactor( const DiscreteKey &discreteKey, const std::vector &factorPairs) : HybridGaussianFactor(ConstructorHelper(discreteKey, factorPairs)) {} +/* *******************************************************************************/ HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys &discreteKeys, const FactorValuePairs &factors) : HybridGaussianFactor(ConstructorHelper(discreteKeys, factors)) {} @@ -223,6 +226,7 @@ double HybridGaussianFactor::potentiallyPrunedComponentError( return std::numeric_limits::max(); } } + /* *******************************************************************************/ AlgebraicDecisionTree HybridGaussianFactor::errorTree( const VectorValues &continuousValues) const { From dac90db441525fcc4836c4958ed1a7c519ac56f0 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 28 Sep 2024 19:53:09 -0700 Subject: [PATCH 371/398] Small things --- gtsam/hybrid/tests/testHybridBayesNet.cpp | 2 +- gtsam/hybrid/tests/testHybridEstimation.cpp | 3 +- .../tests/testHybridGaussianFactorGraph.cpp | 39 +++++++------------ gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 2 +- .../tests/testHybridNonlinearFactorGraph.cpp | 4 +- .../hybrid/tests/testHybridNonlinearISAM.cpp | 2 +- 6 files changed, 20 insertions(+), 32 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 769512bed..ec8f08741 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -20,11 +20,11 @@ #include #include +#include #include #include "Switching.h" 
#include "TinyHybridExample.h" -#include "gtsam/nonlinear/NonlinearFactor.h" // Include for test suite #include diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 2a5fb93ba..672f47503 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -19,6 +19,7 @@ #include #include #include +#include #include #include #include @@ -29,6 +30,7 @@ #include #include #include +#include #include #include #include @@ -39,7 +41,6 @@ #include #include "Switching.h" -#include "gtsam/nonlinear/NonlinearFactor.h" using namespace std; using namespace gtsam; diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index e4acda387..0421727ff 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -61,6 +61,8 @@ using gtsam::symbol_shorthand::Z; // Set up sampling std::mt19937_64 kRng(42); +static const DiscreteKey m1(M(1), 2); + /* ************************************************************************* */ TEST(HybridGaussianFactorGraph, Creation) { HybridConditional conditional; @@ -98,11 +100,9 @@ TEST(HybridGaussianFactorGraph, EliminateMultifrontal) { // Test multifrontal elimination HybridGaussianFactorGraph hfg; - DiscreteKey m(M(1), 2); - // Add priors on x0 and c1 hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); - hfg.add(DecisionTreeFactor(m, {2, 8})); + hfg.add(DecisionTreeFactor(m1, {2, 8})); Ordering ordering; ordering.push_back(X(0)); @@ -131,7 +131,6 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); // Add a hybrid gaussian factor ϕ(x1, c1) - DiscreteKey m1(M(1), 2); hfg.add(HybridGaussianFactor(m1, two::components(X(1)))); auto result = hfg.eliminateSequential(); @@ -148,8 +147,6 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { 
TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) { HybridGaussianFactorGraph hfg; - DiscreteKey m1(M(1), 2); - // Add prior on x0 hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); // Add factor between x0 and x1 @@ -172,8 +169,6 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) { TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { HybridGaussianFactorGraph hfg; - DiscreteKey m1(M(1), 2); - hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); @@ -196,17 +191,15 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) { HybridGaussianFactorGraph hfg; - DiscreteKey m(M(1), 2); - // Prior on x0 hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); // Factor between x0-x1 hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); // Hybrid factor P(x1|c1) - hfg.add(HybridGaussianFactor(m, two::components(X(1)))); + hfg.add(HybridGaussianFactor(m1, two::components(X(1)))); // Prior factor on c1 - hfg.add(DecisionTreeFactor(m, {2, 8})); + hfg.add(DecisionTreeFactor(m1, {2, 8})); // Get a constrained ordering keeping c1 last auto ordering_full = HybridOrdering(hfg); @@ -521,17 +514,15 @@ TEST(HybridGaussianFactorGraph, DiscreteSelection) { TEST(HybridGaussianFactorGraph, optimize) { HybridGaussianFactorGraph hfg; - DiscreteKey c1(C(1), 2); - hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - hfg.add(HybridGaussianFactor(c1, two::components(X(1)))); + hfg.add(HybridGaussianFactor(m1, two::components(X(1)))); auto result = hfg.eliminateSequential(); HybridValues hv = result->optimize(); - EXPECT(assert_equal(hv.atDiscrete(C(1)), int(0))); + EXPECT(assert_equal(hv.atDiscrete(M(1)), int(0))); } /* ************************************************************************* */ @@ -626,7 +617,6 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) { 
TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) { using symbol_shorthand::F; - DiscreteKey m1(M(1), 2); Key z0 = Z(0), f01 = F(0); Key x0 = X(0), x1 = X(1); @@ -842,23 +832,22 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { const VectorValues measurements{{Z(0), Vector1(5.0)}}; - // Create mode key: 1 is low-noise, 0 is high-noise. - const DiscreteKey mode{M(0), 2}; HybridBayesNet bn; + // mode-dependent: 1 is low-noise, 0 is high-noise. // Create hybrid Gaussian factor z_0 = x0 + noise for each measurement. std::vector conditionals{ GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 0.5)}; - auto gm = std::make_shared(mode, conditionals); + auto gm = std::make_shared(m1, conditionals); bn.push_back(gm); // Create prior on X(0). bn.push_back( GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5)); - // Add prior on mode. - bn.emplace_shared(mode, "1/1"); + // Add prior on m1. + bn.emplace_shared(m1, "1/1"); // bn.print(); auto fg = bn.toFactorGraph(measurements); @@ -878,10 +867,10 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { conditional1 = std::make_shared( X(0), Vector1(14.1421), I_1x1 * 2.82843); expectedBayesNet.emplace_shared( - mode, std::vector{conditional0, conditional1}); + m1, std::vector{conditional0, conditional1}); - // Add prior on mode. - expectedBayesNet.emplace_shared(mode, "1/1"); + // Add prior on m1. 
+ expectedBayesNet.emplace_shared(m1, "1/1"); // Test elimination const auto posterior = fg.eliminateSequential(); diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index 7572a64e2..1c72268de 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -24,13 +24,13 @@ #include #include #include +#include #include #include #include #include "Switching.h" -#include "gtsam/nonlinear/NonlinearFactor.h" // Include for test suite #include diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index d32312508..b5f04bb99 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -28,15 +28,13 @@ #include #include #include +#include #include #include #include #include -#include - #include "Switching.h" -#include "gtsam/nonlinear/NonlinearFactor.h" // Include for test suite #include diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index 5a04fc089..2452ebe6d 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -24,13 +24,13 @@ #include #include #include +#include #include #include #include #include "Switching.h" -#include "gtsam/nonlinear/NonlinearFactor.h" // Include for test suite #include From ea54525d375683d111f293878ff74406ec3bcd79 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 28 Sep 2024 19:35:11 -0700 Subject: [PATCH 372/398] Make hybrid elimination a method of HGFG --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 34 +++++++------------ gtsam/hybrid/HybridGaussianFactorGraph.h | 8 +++++ .../tests/testHybridGaussianFactorGraph.cpp | 33 +++++++++++++----- 3 files changed, 46 insertions(+), 29 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp 
b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 7ab7893cb..787c0edd7 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -366,18 +366,17 @@ static std::shared_ptr createHybridGaussianFactor( return std::make_shared(discreteSeparator, newFactors); } -static std::pair> -hybridElimination(const HybridGaussianFactorGraph &factors, - const Ordering &frontalKeys, - const std::set &discreteSeparatorSet) { - // NOTE: since we use the special JunctionTree, - // only possibility is continuous conditioned on discrete. - DiscreteKeys discreteSeparator(discreteSeparatorSet.begin(), - discreteSeparatorSet.end()); +std::pair> +HybridGaussianFactorGraph::eliminate(const Ordering &keys) const { + // Since we eliminate all continuous variables first, + // the discrete separator will be *all* the discrete keys. + const std::set keysForDiscreteVariables = discreteKeys(); + DiscreteKeys discreteSeparator(keysForDiscreteVariables.begin(), + keysForDiscreteVariables.end()); // Collect all the factors to create a set of Gaussian factor graphs in a // decision tree indexed by all discrete keys involved. - GaussianFactorGraphTree factorGraphTree = factors.assembleGraphTree(); + GaussianFactorGraphTree factorGraphTree = assembleGraphTree(); // Convert factor graphs with a nullptr to an empty factor graph. // This is done after assembly since it is non-trivial to keep track of which @@ -392,7 +391,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, } // Expensive elimination of product factor. 
- auto result = EliminatePreferCholesky(graph, frontalKeys); + auto result = EliminatePreferCholesky(graph, keys); // Record whether there any continuous variables left someContinuousLeft |= !result.second->empty(); @@ -436,7 +435,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, */ std::pair> // EliminateHybrid(const HybridGaussianFactorGraph &factors, - const Ordering &frontalKeys) { + const Ordering &keys) { // NOTE: Because we are in the Conditional Gaussian regime there are only // a few cases: // 1. continuous variable, make a hybrid Gaussian conditional if there are @@ -510,20 +509,13 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, if (only_discrete) { // Case 1: we are only dealing with discrete - return discreteElimination(factors, frontalKeys); + return discreteElimination(factors, keys); } else if (only_continuous) { // Case 2: we are only dealing with continuous - return continuousElimination(factors, frontalKeys); + return continuousElimination(factors, keys); } else { // Case 3: We are now in the hybrid land! - KeySet frontalKeysSet(frontalKeys.begin(), frontalKeys.end()); - - // Find all discrete keys. - // Since we eliminate all continuous variables first, - // the discrete separator will be *all* the discrete keys. - std::set discreteSeparator = factors.discreteKeys(); - - return hybridElimination(factors, frontalKeys, discreteSeparator); + return factors.eliminate(keys); } } diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index d7aa1042f..923f48e38 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -217,6 +217,14 @@ class GTSAM_EXPORT HybridGaussianFactorGraph */ GaussianFactorGraphTree assembleGraphTree() const; + /** + * @brief Eliminate the given continuous keys. + * + * @param keys The continuous keys to eliminate. + * @return The conditional on the keys and a factor on the separator. 
+ */ + std::pair, std::shared_ptr> + eliminate(const Ordering& keys) const; /// @} /// Get the GaussianFactorGraph at a given discrete assignment. diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 0421727ff..ee7ca5265 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -18,6 +18,7 @@ #include #include #include +#include #include #include #include @@ -42,6 +43,7 @@ #include #include #include +#include #include #include @@ -120,6 +122,25 @@ std::vector components(Key key) { } } // namespace two +/* ************************************************************************* */ +TEST(HybridGaussianFactorGraph, hybridEliminationOneFactor) { + HybridGaussianFactorGraph hfg; + hfg.add(HybridGaussianFactor(m1, two::components(X(1)))); + + auto result = hfg.eliminate({X(1)}); + + // Check that we have a valid Gaussian conditional. + auto hgc = result.first->asHybrid(); + CHECK(hgc); + const HybridValues values{{{X(1), Z_3x1}}, {{M(1), 1}}}; + EXPECT(HybridConditional::CheckInvariants(*result.first, values)); + + // Check that factor is discrete and correct + auto factor = std::dynamic_pointer_cast(result.second); + CHECK(factor); + EXPECT(assert_equal(DecisionTreeFactor{m1, "1 1"}, *factor)); +} + /* ************************************************************************* */ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { HybridGaussianFactorGraph hfg; @@ -221,20 +242,16 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1)); - { - hfg.add(HybridGaussianFactor({M(0), 2}, two::components(X(0)))); - hfg.add(HybridGaussianFactor({M(1), 2}, two::components(X(2)))); - } + hfg.add(HybridGaussianFactor({M(0), 2}, two::components(X(0)))); + hfg.add(HybridGaussianFactor({M(1), 
2}, two::components(X(2)))); hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); hfg.add(JacobianFactor(X(3), I_3x3, X(4), -I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1)); - { - hfg.add(HybridGaussianFactor({M(3), 2}, two::components(X(3)))); - hfg.add(HybridGaussianFactor({M(2), 2}, two::components(X(5)))); - } + hfg.add(HybridGaussianFactor({M(3), 2}, two::components(X(3)))); + hfg.add(HybridGaussianFactor({M(2), 2}, two::components(X(5)))); auto ordering_full = Ordering::ColamdConstrainedLast(hfg, {M(0), M(1), M(2), M(3)}); From 32c67892ed618f719fdaf4a1f76befde90ef6ab5 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 28 Sep 2024 20:09:24 -0700 Subject: [PATCH 373/398] Move test where it belongs --- gtsam/hybrid/tests/testHybridBayesNet.cpp | 51 +++++++++++++++++++ .../tests/testHybridGaussianFactorGraph.cpp | 50 ------------------ 2 files changed, 51 insertions(+), 50 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index ec8f08741..67005b470 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -444,6 +444,57 @@ TEST(HybridBayesNet, Sampling) { // num_samples))); } +/* ****************************************************************************/ +// Test hybrid gaussian factor graph errorTree when +// there is a HybridConditional in the graph +TEST(HybridBayesNet, ErrorTreeWithConditional) { + using symbol_shorthand::F; + + Key z0 = Z(0), f01 = F(0); + Key x0 = X(0), x1 = X(1); + + HybridBayesNet hbn; + + auto prior_model = noiseModel::Isotropic::Sigma(1, 1e-1); + auto measurement_model = noiseModel::Isotropic::Sigma(1, 2.0); + + // Set a prior P(x0) at x0=0 + hbn.emplace_shared(x0, Vector1(0.0), I_1x1, prior_model); + + // Add measurement P(z0 | x0) + hbn.emplace_shared(z0, Vector1(0.0), -I_1x1, x0, I_1x1, + measurement_model); + + // Add hybrid motion model + double mu = 0.0; + double sigma0 = 1e2, 
sigma1 = 1e-2; + auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); + auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); + auto c0 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, + x0, -I_1x1, model0), + c1 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, + x0, -I_1x1, model1); + DiscreteKey m1(M(2), 2); + hbn.emplace_shared(m1, std::vector{c0, c1}); + + // Discrete uniform prior. + hbn.emplace_shared(m1, "0.5/0.5"); + + VectorValues given; + given.insert(z0, Vector1(0.0)); + given.insert(f01, Vector1(0.0)); + auto gfg = hbn.toFactorGraph(given); + + VectorValues vv; + vv.insert(x0, Vector1(1.0)); + vv.insert(x1, Vector1(2.0)); + AlgebraicDecisionTree errorTree = gfg.errorTree(vv); + + // regression + AlgebraicDecisionTree expected(m1, 59.335390372, 5050.125); + EXPECT(assert_equal(expected, errorTree, 1e-9)); +} + /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index ee7ca5265..38429ac60 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -628,56 +628,6 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) { EXPECT(assert_equal(expected_probabilities, probabilities, 1e-7)); } -/* ****************************************************************************/ -// Test hybrid gaussian factor graph errorTree when -// there is a HybridConditional in the graph -TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) { - using symbol_shorthand::F; - - Key z0 = Z(0), f01 = F(0); - Key x0 = X(0), x1 = X(1); - - HybridBayesNet hbn; - - auto prior_model = noiseModel::Isotropic::Sigma(1, 1e-1); - auto measurement_model = noiseModel::Isotropic::Sigma(1, 2.0); - - // Set a prior P(x0) at x0=0 - hbn.emplace_shared(x0, Vector1(0.0), I_1x1, prior_model); - - // Add measurement P(z0 | x0) - hbn.emplace_shared(z0, 
Vector1(0.0), -I_1x1, x0, I_1x1, - measurement_model); - - // Add hybrid motion model - double mu = 0.0; - double sigma0 = 1e2, sigma1 = 1e-2; - auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); - auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); - auto c0 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, - x0, -I_1x1, model0), - c1 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, - x0, -I_1x1, model1); - hbn.emplace_shared(m1, std::vector{c0, c1}); - - // Discrete uniform prior. - hbn.emplace_shared(m1, "0.5/0.5"); - - VectorValues given; - given.insert(z0, Vector1(0.0)); - given.insert(f01, Vector1(0.0)); - auto gfg = hbn.toFactorGraph(given); - - VectorValues vv; - vv.insert(x0, Vector1(1.0)); - vv.insert(x1, Vector1(2.0)); - AlgebraicDecisionTree errorTree = gfg.errorTree(vv); - - // regression - AlgebraicDecisionTree expected(m1, 59.335390372, 5050.125); - EXPECT(assert_equal(expected, errorTree, 1e-9)); -} - /* ****************************************************************************/ // Test hybrid gaussian factor graph errorTree during // incremental operation From e690ff817ad49e07401479e72a2d325a09ed612d Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 28 Sep 2024 20:47:22 -0700 Subject: [PATCH 374/398] Create mean/stddev constructors --- gtsam/hybrid/HybridGaussianConditional.cpp | 79 +++++++++++++++++----- gtsam/hybrid/HybridGaussianConditional.h | 32 +++++++-- gtsam/hybrid/tests/testGaussianMixture.cpp | 31 +++------ 3 files changed, 97 insertions(+), 45 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 4e896206f..d0f812d73 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -29,16 +29,20 @@ #include +#include "gtsam/linear/JacobianFactor.h" + namespace gtsam { /* *******************************************************************************/ struct HybridGaussianConditional::ConstructorHelper { 
std::optional nrFrontals; - HybridGaussianFactor::FactorValuePairs pairs; + FactorValuePairs pairs; + Conditionals conditionals; double minNegLogConstant; - /// Compute all variables needed for the private constructor below. + /// Construct from tree of GaussianConditionals. ConstructorHelper(const Conditionals &conditionals) - : minNegLogConstant(std::numeric_limits::infinity()) { + : conditionals(conditionals), + minNegLogConstant(std::numeric_limits::infinity()) { auto func = [this](const GaussianConditional::shared_ptr &c) -> GaussianFactorValuePair { double value = 0.0; @@ -51,38 +55,79 @@ struct HybridGaussianConditional::ConstructorHelper { } return {std::dynamic_pointer_cast(c), value}; }; - pairs = HybridGaussianFactor::FactorValuePairs(conditionals, func); + pairs = FactorValuePairs(conditionals, func); if (!nrFrontals.has_value()) { throw std::runtime_error( "HybridGaussianConditional: need at least one frontal variable."); } } + + /// Construct from means and a single sigma. + ConstructorHelper(Key x, const DiscreteKey mode, + const std::vector &means, double sigma) + : nrFrontals(1), minNegLogConstant(0) { + std::vector gcs; + for (const auto &mean : means) { + auto c = GaussianConditional::sharedMeanAndStddev(x, mean, sigma); + gcs.push_back(c); + } + conditionals = Conditionals({mode}, gcs); + pairs = FactorValuePairs(conditionals, [](const auto &c) { + return GaussianFactorValuePair{c, 0.0}; + }); + } + + /// Construct from means and a sigmas. 
+ ConstructorHelper(Key x, const DiscreteKey mode, + const std::vector> ¶meters) + : nrFrontals(1), + minNegLogConstant(std::numeric_limits::infinity()) { + std::vector gcs; + std::vector fvs; + for (const auto &[mean, sigma] : parameters) { + auto c = GaussianConditional::sharedMeanAndStddev(x, mean, sigma); + double value = c->negLogConstant(); + minNegLogConstant = std::min(minNegLogConstant, value); + gcs.push_back(c); + fvs.push_back({c, value}); + } + conditionals = Conditionals({mode}, gcs); + pairs = FactorValuePairs({mode}, fvs); + } }; /* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKeys &discreteParents, - const HybridGaussianConditional::Conditionals &conditionals, - const ConstructorHelper &helper) + const DiscreteKeys &discreteParents, const ConstructorHelper &helper) : BaseFactor(discreteParents, helper.pairs), BaseConditional(*helper.nrFrontals), - conditionals_(conditionals), + conditionals_(helper.conditionals), negLogConstant_(helper.minNegLogConstant) {} -/* *******************************************************************************/ +HybridGaussianConditional::HybridGaussianConditional( + const DiscreteKey &mode, + const std::vector &conditionals) + : HybridGaussianConditional(DiscreteKeys{mode}, + Conditionals({mode}, conditionals)) {} + +HybridGaussianConditional::HybridGaussianConditional( + Key x, const DiscreteKey mode, const std::vector &means, + double sigma) + : HybridGaussianConditional(DiscreteKeys{mode}, + ConstructorHelper(x, mode, means, sigma)) {} + +HybridGaussianConditional::HybridGaussianConditional( + Key x, const DiscreteKey mode, + const std::vector> ¶meters) + : HybridGaussianConditional(DiscreteKeys{mode}, + ConstructorHelper(x, mode, parameters)) {} + HybridGaussianConditional::HybridGaussianConditional( const DiscreteKeys &discreteParents, const HybridGaussianConditional::Conditionals &conditionals) - : 
HybridGaussianConditional(discreteParents, conditionals, + : HybridGaussianConditional(discreteParents, ConstructorHelper(conditionals)) {} -/* *******************************************************************************/ -HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKey &discreteParent, - const std::vector &conditionals) - : HybridGaussianConditional(DiscreteKeys{discreteParent}, - Conditionals({discreteParent}, conditionals)) {} - /* *******************************************************************************/ const HybridGaussianConditional::Conditionals & HybridGaussianConditional::conditionals() const { diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 02f6bba1e..662318837 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -79,14 +79,36 @@ class GTSAM_EXPORT HybridGaussianConditional /** * @brief Construct from one discrete key and vector of conditionals. * - * @param discreteParent Single discrete parent variable + * @param mode Single discrete parent variable * @param conditionals Vector of conditionals with the same size as the * cardinality of the discrete parent. */ HybridGaussianConditional( - const DiscreteKey &discreteParent, + const DiscreteKey &mode, const std::vector &conditionals); + /** + * @brief Construct from vector of means and a single sigma. + * + * @param x The continuous key. + * @param mode The discrete key. + * @param means The means for the Gaussian conditionals. + * @param sigma The standard deviation for the Gaussian conditionals. + */ + HybridGaussianConditional(Key x, const DiscreteKey mode, + const std::vector &means, double sigma); + + /** + * @brief Construct from vector of means and sigmas. + * + * @param x The continuous key. + * @param mode The discrete key. + * @param parameters The means and sigmas for the Gaussian conditionals. 
+ */ + HybridGaussianConditional( + Key x, const DiscreteKey mode, + const std::vector> ¶meters); + /** * @brief Construct from multiple discrete keys and conditional tree. * @@ -186,10 +208,8 @@ class GTSAM_EXPORT HybridGaussianConditional struct ConstructorHelper; /// Private constructor that uses helper struct above. - HybridGaussianConditional( - const DiscreteKeys &discreteParents, - const HybridGaussianConditional::Conditionals &conditionals, - const ConstructorHelper &helper); + HybridGaussianConditional(const DiscreteKeys &discreteParents, + const ConstructorHelper &helper); /// Convert to a DecisionTree of Gaussian factor graphs. GaussianFactorGraphTree asGaussianFactorGraphTree() const; diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index cde7e4063..87ef5e25d 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -43,26 +43,6 @@ const DiscreteValues m1Assignment{{M(0), 1}}; DiscreteConditional::shared_ptr mixing = std::make_shared(m, "60/40"); -/** - * Create a simple Gaussian Mixture Model represented as p(z|m)P(m) - * where m is a discrete variable and z is a continuous variable. - * The "mode" m is binary and depending on m, we have 2 different means - * μ1 and μ2 for the Gaussian density p(z|m). 
- */ -HybridBayesNet GaussianMixtureModel(double mu0, double mu1, double sigma0, - double sigma1) { - HybridBayesNet hbn; - auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); - auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); - auto c0 = std::make_shared(Z(0), Vector1(mu0), I_1x1, - model0), - c1 = std::make_shared(Z(0), Vector1(mu1), I_1x1, - model1); - hbn.emplace_shared(m, std::vector{c0, c1}); - hbn.push_back(mixing); - return hbn; -} - /// Gaussian density function double Gaussian(double mu, double sigma, double z) { return exp(-0.5 * pow((z - mu) / sigma, 2)) / sqrt(2 * M_PI * sigma * sigma); @@ -99,7 +79,10 @@ TEST(GaussianMixture, GaussianMixtureModel) { double mu0 = 1.0, mu1 = 3.0; double sigma = 2.0; - auto hbn = GaussianMixtureModel(mu0, mu1, sigma, sigma); + HybridBayesNet hbn; + std::vector means{Vector1(mu0), Vector1(mu1)}; + hbn.emplace_shared(Z(0), m, means, sigma); + hbn.push_back(mixing); // At the halfway point between the means, we should get P(m|z)=0.5 double midway = mu1 - mu0; @@ -133,7 +116,11 @@ TEST(GaussianMixture, GaussianMixtureModel2) { double mu0 = 1.0, mu1 = 3.0; double sigma0 = 8.0, sigma1 = 4.0; - auto hbn = GaussianMixtureModel(mu0, mu1, sigma0, sigma1); + HybridBayesNet hbn; + std::vector> parameters{{Vector1(mu0), sigma0}, + {Vector1(mu1), sigma1}}; + hbn.emplace_shared(Z(0), m, parameters); + hbn.push_back(mixing); // We get zMax=3.1333 by finding the maximum value of the function, at which // point the mode m==1 is about twice as probable as m==0. 
From 85e57f1c20839bdf2e7ed12e2388488faacf4b7a Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 28 Sep 2024 22:05:07 -0700 Subject: [PATCH 375/398] Use new constructors --- gtsam/hybrid/HybridBayesNet.h | 7 +- gtsam/hybrid/tests/testHybridBayesNet.cpp | 105 ++++++++-------------- 2 files changed, 45 insertions(+), 67 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index d5d50afd2..22a43f3bd 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -44,9 +44,14 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { /// @name Standard Constructors /// @{ - /** Construct empty Bayes net */ + /// Construct empty Bayes net. HybridBayesNet() = default; + /// Constructor that takes an initializer list of shared pointers. + HybridBayesNet( + std::initializer_list conditionals) + : Base(conditionals) {} + /// @} /// @name Testable /// @{ diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 67005b470..e52d611bd 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -20,6 +20,7 @@ #include #include +#include #include #include @@ -32,7 +33,6 @@ using namespace std; using namespace gtsam; -using noiseModel::Isotropic; using symbol_shorthand::M; using symbol_shorthand::X; using symbol_shorthand::Z; @@ -91,40 +91,52 @@ TEST(HybridBayesNet, Tiny) { EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8); } +/* ****************************************************************************/ +// Hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia). 
+namespace different_sigmas { +const auto gc = GaussianConditional::sharedMeanAndStddev(X(0), 2 * I_1x1, X(1), + Vector1(-4.0), 5.0); + +const std::vector> parms{{Vector1(5), 2.0}, + {Vector1(2), 3.0}}; +const auto hgc = std::make_shared(X(1), Asia, parms); + +const auto prior = std::make_shared(Asia, "99/1"); +auto wrap = [](const auto& c) { + return std::make_shared(c); +}; +const HybridBayesNet bayesNet{wrap(gc), wrap(hgc), wrap(prior)}; + +// Create values at which to evaluate. +HybridValues values{{{X(0), Vector1(-6)}, {X(1), Vector1(1)}}, {{asiaKey, 0}}}; +} // namespace different_sigmas + /* ****************************************************************************/ // Test evaluate for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia). TEST(HybridBayesNet, evaluateHybrid) { - const auto continuousConditional = GaussianConditional::sharedMeanAndStddev( - X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0); + using namespace different_sigmas; - const SharedDiagonal model0 = noiseModel::Diagonal::Sigmas(Vector1(2.0)), - model1 = noiseModel::Diagonal::Sigmas(Vector1(3.0)); - - const auto conditional0 = std::make_shared( - X(1), Vector1::Constant(5), I_1x1, model0), - conditional1 = std::make_shared( - X(1), Vector1::Constant(2), I_1x1, model1); - - // Create hybrid Bayes net. - HybridBayesNet bayesNet; - bayesNet.push_back(continuousConditional); - bayesNet.emplace_shared( - Asia, std::vector{conditional0, conditional1}); - bayesNet.emplace_shared(Asia, "99/1"); - - // Create values at which to evaluate. 
- HybridValues values; - values.insert(asiaKey, 0); - values.insert(X(0), Vector1(-6)); - values.insert(X(1), Vector1(1)); - - const double conditionalProbability = - continuousConditional->evaluate(values.continuous()); - const double mixtureProbability = conditional0->evaluate(values.continuous()); + const double conditionalProbability = gc->evaluate(values.continuous()); + const double mixtureProbability = hgc->evaluate(values); EXPECT_DOUBLES_EQUAL(conditionalProbability * mixtureProbability * 0.99, bayesNet.evaluate(values), 1e-9); } +/* ****************************************************************************/ +// Test error for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia). +TEST(HybridBayesNet, Error) { + using namespace different_sigmas; + + AlgebraicDecisionTree actual = bayesNet.errorTree(values.continuous()); + + // Regression. + // Manually added all the error values from the 3 conditional types. + AlgebraicDecisionTree expected( + {Asia}, std::vector{2.33005033585, 5.38619084965}); + + EXPECT(assert_equal(expected, actual)); +} + /* ****************************************************************************/ // Test choosing an assignment of conditionals TEST(HybridBayesNet, Choose) { @@ -154,45 +166,6 @@ TEST(HybridBayesNet, Choose) { *gbn.at(3))); } -/* ****************************************************************************/ -// Test error for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia). 
-TEST(HybridBayesNet, Error) { - const auto continuousConditional = GaussianConditional::sharedMeanAndStddev( - X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0); - - const SharedDiagonal model0 = noiseModel::Diagonal::Sigmas(Vector1(2.0)), - model1 = noiseModel::Diagonal::Sigmas(Vector1(3.0)); - - const auto conditional0 = std::make_shared( - X(1), Vector1::Constant(5), I_1x1, model0), - conditional1 = std::make_shared( - X(1), Vector1::Constant(2), I_1x1, model1); - - auto gm = std::make_shared( - Asia, std::vector{conditional0, conditional1}); - // Create hybrid Bayes net. - HybridBayesNet bayesNet; - bayesNet.push_back(continuousConditional); - bayesNet.push_back(gm); - bayesNet.emplace_shared(Asia, "99/1"); - - // Create values at which to evaluate. - HybridValues values; - values.insert(asiaKey, 0); - values.insert(X(0), Vector1(-6)); - values.insert(X(1), Vector1(1)); - - AlgebraicDecisionTree actual_errors = - bayesNet.errorTree(values.continuous()); - - // Regression. - // Manually added all the error values from the 3 conditional types. 
- AlgebraicDecisionTree expected_errors( - {Asia}, std::vector{2.33005033585, 5.38619084965}); - - EXPECT(assert_equal(expected_errors, actual_errors)); -} - /* ****************************************************************************/ // Test Bayes net optimize TEST(HybridBayesNet, OptimizeAssignment) { From 3a5d7a46480559e315e437ab5d54de4e51704a31 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 28 Sep 2024 22:15:35 -0700 Subject: [PATCH 376/398] Remove one bougie constructor --- gtsam/hybrid/HybridGaussianConditional.cpp | 21 --------------------- gtsam/hybrid/HybridGaussianConditional.h | 11 ----------- gtsam/hybrid/tests/testGaussianMixture.cpp | 5 +++-- 3 files changed, 3 insertions(+), 34 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index d0f812d73..5e80f802a 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -62,21 +62,6 @@ struct HybridGaussianConditional::ConstructorHelper { } } - /// Construct from means and a single sigma. - ConstructorHelper(Key x, const DiscreteKey mode, - const std::vector &means, double sigma) - : nrFrontals(1), minNegLogConstant(0) { - std::vector gcs; - for (const auto &mean : means) { - auto c = GaussianConditional::sharedMeanAndStddev(x, mean, sigma); - gcs.push_back(c); - } - conditionals = Conditionals({mode}, gcs); - pairs = FactorValuePairs(conditionals, [](const auto &c) { - return GaussianFactorValuePair{c, 0.0}; - }); - } - /// Construct from means and a sigmas. 
ConstructorHelper(Key x, const DiscreteKey mode, const std::vector> ¶meters) @@ -110,12 +95,6 @@ HybridGaussianConditional::HybridGaussianConditional( : HybridGaussianConditional(DiscreteKeys{mode}, Conditionals({mode}, conditionals)) {} -HybridGaussianConditional::HybridGaussianConditional( - Key x, const DiscreteKey mode, const std::vector &means, - double sigma) - : HybridGaussianConditional(DiscreteKeys{mode}, - ConstructorHelper(x, mode, means, sigma)) {} - HybridGaussianConditional::HybridGaussianConditional( Key x, const DiscreteKey mode, const std::vector> ¶meters) diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 662318837..79cafece7 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -87,17 +87,6 @@ class GTSAM_EXPORT HybridGaussianConditional const DiscreteKey &mode, const std::vector &conditionals); - /** - * @brief Construct from vector of means and a single sigma. - * - * @param x The continuous key. - * @param mode The discrete key. - * @param means The means for the Gaussian conditionals. - * @param sigma The standard deviation for the Gaussian conditionals. - */ - HybridGaussianConditional(Key x, const DiscreteKey mode, - const std::vector &means, double sigma); - /** * @brief Construct from vector of means and sigmas. 
* diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index 87ef5e25d..7d227f2d7 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -80,8 +80,9 @@ TEST(GaussianMixture, GaussianMixtureModel) { double sigma = 2.0; HybridBayesNet hbn; - std::vector means{Vector1(mu0), Vector1(mu1)}; - hbn.emplace_shared(Z(0), m, means, sigma); + std::vector> parameters{{Vector1(mu0), sigma}, + {Vector1(mu1), sigma}}; + hbn.emplace_shared(Z(0), m, parameters); hbn.push_back(mixing); // At the halfway point between the means, we should get P(m|z)=0.5 From ad9fd1969e994ac845d853382ab54b289e059f51 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 28 Sep 2024 23:19:02 -0700 Subject: [PATCH 377/398] Add conditional constructors as well --- gtsam/hybrid/HybridGaussianConditional.cpp | 74 ++++++++++++++++--- gtsam/hybrid/HybridGaussianConditional.h | 26 ++++--- gtsam/hybrid/tests/TinyHybridExample.h | 8 +- gtsam/hybrid/tests/testGaussianMixture.cpp | 4 +- gtsam/hybrid/tests/testHybridBayesNet.cpp | 2 +- gtsam/hybrid/tests/testHybridEstimation.cpp | 20 ++--- .../tests/testHybridGaussianFactorGraph.cpp | 30 +++----- gtsam/hybrid/tests/testHybridMotionModel.cpp | 11 +-- 8 files changed, 110 insertions(+), 65 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 5e80f802a..998afb1a3 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -33,14 +33,14 @@ namespace gtsam { /* *******************************************************************************/ -struct HybridGaussianConditional::ConstructorHelper { +struct HybridGaussianConditional::Helper { std::optional nrFrontals; FactorValuePairs pairs; Conditionals conditionals; double minNegLogConstant; /// Construct from tree of GaussianConditionals. 
- ConstructorHelper(const Conditionals &conditionals) + Helper(const Conditionals &conditionals) : conditionals(conditionals), minNegLogConstant(std::numeric_limits::infinity()) { auto func = [this](const GaussianConditional::shared_ptr &c) @@ -63,14 +63,54 @@ struct HybridGaussianConditional::ConstructorHelper { } /// Construct from means and a sigmas. - ConstructorHelper(Key x, const DiscreteKey mode, - const std::vector> ¶meters) + Helper(const DiscreteKey mode, Key key, + const std::vector> ¶meters) : nrFrontals(1), minNegLogConstant(std::numeric_limits::infinity()) { std::vector gcs; std::vector fvs; for (const auto &[mean, sigma] : parameters) { - auto c = GaussianConditional::sharedMeanAndStddev(x, mean, sigma); + auto c = GaussianConditional::sharedMeanAndStddev(key, mean, sigma); + double value = c->negLogConstant(); + minNegLogConstant = std::min(minNegLogConstant, value); + gcs.push_back(c); + fvs.push_back({c, value}); + } + conditionals = Conditionals({mode}, gcs); + pairs = FactorValuePairs({mode}, fvs); + } + + /// Construct from means and a sigmas. + Helper(const DiscreteKey mode, Key key, // + const Matrix &A, Key parent, + const std::vector> ¶meters) + : nrFrontals(1), + minNegLogConstant(std::numeric_limits::infinity()) { + std::vector gcs; + std::vector fvs; + for (const auto &[mean, sigma] : parameters) { + auto c = + GaussianConditional::sharedMeanAndStddev(key, A, parent, mean, sigma); + double value = c->negLogConstant(); + minNegLogConstant = std::min(minNegLogConstant, value); + gcs.push_back(c); + fvs.push_back({c, value}); + } + conditionals = Conditionals({mode}, gcs); + pairs = FactorValuePairs({mode}, fvs); + } + + /// Construct from means and a sigmas. 
+ Helper(const DiscreteKey mode, Key key, // + const Matrix &A1, Key parent1, const Matrix &A2, Key parent2, + const std::vector> ¶meters) + : nrFrontals(1), + minNegLogConstant(std::numeric_limits::infinity()) { + std::vector gcs; + std::vector fvs; + for (const auto &[mean, sigma] : parameters) { + auto c = GaussianConditional::sharedMeanAndStddev(key, A1, parent1, A2, + parent2, mean, sigma); double value = c->negLogConstant(); minNegLogConstant = std::min(minNegLogConstant, value); gcs.push_back(c); @@ -83,7 +123,7 @@ struct HybridGaussianConditional::ConstructorHelper { /* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKeys &discreteParents, const ConstructorHelper &helper) + const DiscreteKeys &discreteParents, const Helper &helper) : BaseFactor(discreteParents, helper.pairs), BaseConditional(*helper.nrFrontals), conditionals_(helper.conditionals), @@ -96,16 +136,30 @@ HybridGaussianConditional::HybridGaussianConditional( Conditionals({mode}, conditionals)) {} HybridGaussianConditional::HybridGaussianConditional( - Key x, const DiscreteKey mode, + const DiscreteKey mode, Key key, // const std::vector> ¶meters) : HybridGaussianConditional(DiscreteKeys{mode}, - ConstructorHelper(x, mode, parameters)) {} + Helper(mode, key, parameters)) {} + +HybridGaussianConditional::HybridGaussianConditional( + const DiscreteKey mode, Key key, // + const Matrix &A, Key parent, + const std::vector> ¶meters) + : HybridGaussianConditional(DiscreteKeys{mode}, + Helper(mode, key, A, parent, parameters)) {} + +HybridGaussianConditional::HybridGaussianConditional( + const DiscreteKey mode, Key key, // + const Matrix &A1, Key parent1, const Matrix &A2, Key parent2, + const std::vector> ¶meters) + : HybridGaussianConditional( + DiscreteKeys{mode}, + Helper(mode, key, A1, parent1, A2, parent2, parameters)) {} HybridGaussianConditional::HybridGaussianConditional( const DiscreteKeys 
&discreteParents, const HybridGaussianConditional::Conditionals &conditionals) - : HybridGaussianConditional(discreteParents, - ConstructorHelper(conditionals)) {} + : HybridGaussianConditional(discreteParents, Helper(conditionals)) {} /* *******************************************************************************/ const HybridGaussianConditional::Conditionals & diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 79cafece7..7c14f0008 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -87,15 +87,21 @@ class GTSAM_EXPORT HybridGaussianConditional const DiscreteKey &mode, const std::vector &conditionals); - /** - * @brief Construct from vector of means and sigmas. - * - * @param x The continuous key. - * @param mode The discrete key. - * @param parameters The means and sigmas for the Gaussian conditionals. - */ + /// Construct from mean `mu_i` and `sigma_i`. HybridGaussianConditional( - Key x, const DiscreteKey mode, + const DiscreteKey mode, Key key, // + const std::vector> ¶meters); + + /// Construct from conditional mean `A1 p1 + b_i` and `sigma_i`. + HybridGaussianConditional( + const DiscreteKey mode, Key key, // + const Matrix &A, Key parent, + const std::vector> ¶meters); + + /// Construct from conditional mean `A1 p1 + A2 p2 + b_i` and `sigma_i`. + HybridGaussianConditional( + const DiscreteKey mode, Key key, // + const Matrix &A1, Key parent1, const Matrix &A2, Key parent2, const std::vector> ¶meters); /** @@ -194,11 +200,11 @@ class GTSAM_EXPORT HybridGaussianConditional private: /// Helper struct for private constructor. - struct ConstructorHelper; + struct Helper; /// Private constructor that uses helper struct above. HybridGaussianConditional(const DiscreteKeys &discreteParents, - const ConstructorHelper &helper); + const Helper &helper); /// Convert to a DecisionTree of Gaussian factor graphs. 
GaussianFactorGraphTree asGaussianFactorGraphTree() const; diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index f2ff7dde2..1862e8a31 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -41,12 +41,12 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1, HybridBayesNet bayesNet; // Create hybrid Gaussian factor z_i = x0 + noise for each measurement. + std::vector> measurementModels{{Z_1x1, 0.5}, + {Z_1x1, 3.0}}; for (size_t i = 0; i < num_measurements; i++) { const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode; - std::vector conditionals{ - GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 0.5), - GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 3)}; - bayesNet.emplace_shared(mode_i, conditionals); + bayesNet.emplace_shared(mode_i, Z(i), I_1x1, + X(0), measurementModels); } // Create prior on X(0). diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index 7d227f2d7..d6eac9c38 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -82,7 +82,7 @@ TEST(GaussianMixture, GaussianMixtureModel) { HybridBayesNet hbn; std::vector> parameters{{Vector1(mu0), sigma}, {Vector1(mu1), sigma}}; - hbn.emplace_shared(Z(0), m, parameters); + hbn.emplace_shared(m, Z(0), parameters); hbn.push_back(mixing); // At the halfway point between the means, we should get P(m|z)=0.5 @@ -120,7 +120,7 @@ TEST(GaussianMixture, GaussianMixtureModel2) { HybridBayesNet hbn; std::vector> parameters{{Vector1(mu0), sigma0}, {Vector1(mu1), sigma1}}; - hbn.emplace_shared(Z(0), m, parameters); + hbn.emplace_shared(m, Z(0), parameters); hbn.push_back(mixing); // We get zMax=3.1333 by finding the maximum value of the function, at which diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 
e52d611bd..c33ff8454 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -99,7 +99,7 @@ const auto gc = GaussianConditional::sharedMeanAndStddev(X(0), 2 * I_1x1, X(1), const std::vector> parms{{Vector1(5), 2.0}, {Vector1(2), 3.0}}; -const auto hgc = std::make_shared(X(1), Asia, parms); +const auto hgc = std::make_shared(Asia, X(1), parms); const auto prior = std::make_shared(Asia, "99/1"); auto wrap = [](const auto& c) { diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 672f47503..25d697451 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -573,12 +573,10 @@ TEST(HybridEstimation, ModeSelection) { bn.push_back( GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1)); - std::vector conditionals{ - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1), - Z_1x1, noise_loose), - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1), - Z_1x1, noise_tight)}; - bn.emplace_shared(mode, conditionals); + std::vector> parameters{{Z_1x1, noise_loose}, + {Z_1x1, noise_tight}}; + bn.emplace_shared(mode, Z(0), I_1x1, X(0), -I_1x1, + X(1), parameters); VectorValues vv; vv.insert(Z(0), Z_1x1); @@ -605,12 +603,10 @@ TEST(HybridEstimation, ModeSelection2) { bn.push_back( GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1)); - std::vector conditionals{ - GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1), - Z_3x1, noise_loose), - GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1), - Z_3x1, noise_tight)}; - bn.emplace_shared(mode, conditionals); + std::vector> parameters{{Z_3x1, noise_loose}, + {Z_3x1, noise_tight}}; + bn.emplace_shared(mode, Z(0), I_3x3, X(0), -I_3x3, + X(1), parameters); VectorValues vv; vv.insert(Z(0), Z_3x1); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp 
b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 38429ac60..6aef60386 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -803,11 +803,8 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { // mode-dependent: 1 is low-noise, 0 is high-noise. // Create hybrid Gaussian factor z_0 = x0 + noise for each measurement. - std::vector conditionals{ - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3), - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 0.5)}; - auto gm = std::make_shared(m1, conditionals); - bn.push_back(gm); + std::vector> parms{{Z_1x1, 3}, {Z_1x1, 0.5}}; + bn.emplace_shared(m1, Z(0), I_1x1, X(0), parms); // Create prior on X(0). bn.push_back( @@ -912,32 +909,27 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { // NOTE: we add reverse topological so we can sample from the Bayes net.: // Add measurements: + std::vector> measurementModels{{Z_1x1, 3}, + {Z_1x1, 0.5}}; for (size_t t : {0, 1, 2}) { // Create hybrid Gaussian factor on Z(t) conditioned on X(t) and mode N(t): const auto noise_mode_t = DiscreteKey{N(t), 2}; - std::vector conditionals{ - GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1, 0.5), - GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1, - 3.0)}; - bn.emplace_shared(noise_mode_t, conditionals); + bn.emplace_shared(noise_mode_t, Z(t), I_1x1, + X(t), measurementModels); // Create prior on discrete mode N(t): bn.emplace_shared(noise_mode_t, "20/80"); } - // Add motion models: + // Add motion models. TODO(frank): why are they exactly the same? 
+ std::vector> motionModels{{Z_1x1, 0.2}, + {Z_1x1, 0.2}}; for (size_t t : {2, 1}) { // Create hybrid Gaussian factor on X(t) conditioned on X(t-1) // and mode M(t-1): const auto motion_model_t = DiscreteKey{M(t), 2}; - std::vector conditionals{ - GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), Z_1x1, - 0.2), - GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), I_1x1, - 0.2)}; - auto gm = std::make_shared(motion_model_t, - conditionals); - bn.push_back(gm); + bn.emplace_shared(motion_model_t, X(t), I_1x1, + X(t - 1), motionModels); // Create prior on motion model M(t): bn.emplace_shared(motion_model_t, "40/60"); diff --git a/gtsam/hybrid/tests/testHybridMotionModel.cpp b/gtsam/hybrid/tests/testHybridMotionModel.cpp index 1a2a09e15..cbfdc7fe4 100644 --- a/gtsam/hybrid/tests/testHybridMotionModel.cpp +++ b/gtsam/hybrid/tests/testHybridMotionModel.cpp @@ -55,13 +55,10 @@ void addMeasurement(HybridBayesNet &hbn, Key z_key, Key x_key, double sigma) { /// Create hybrid motion model p(x1 | x0, m1) static HybridGaussianConditional::shared_ptr CreateHybridMotionModel( double mu0, double mu1, double sigma0, double sigma1) { - auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); - auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); - auto c0 = make_shared(X(1), Vector1(mu0), I_1x1, X(0), - -I_1x1, model0), - c1 = make_shared(X(1), Vector1(mu1), I_1x1, X(0), - -I_1x1, model1); - return std::make_shared(m1, std::vector{c0, c1}); + std::vector> motionModels{{Vector1(mu0), sigma0}, + {Vector1(mu1), sigma1}}; + return std::make_shared(m1, X(1), I_1x1, X(0), + motionModels); } /// Create two state Bayes network with 1 or two measurement models From 14d0f9f1ef4dde40a8d510fa918c3097b6e8bec6 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 28 Sep 2024 23:51:58 -0700 Subject: [PATCH 378/398] Avoid copy/pasta --- gtsam/hybrid/HybridGaussianConditional.cpp | 105 +++++++++------------ 1 file changed, 45 insertions(+), 60 deletions(-) diff --git 
a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 998afb1a3..068bd2e5d 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -39,12 +39,55 @@ struct HybridGaussianConditional::Helper { Conditionals conditionals; double minNegLogConstant; + using GC = GaussianConditional; + using P = std::vector>; + + // Common code for three constructors below: + template + void initialize(const DiscreteKey &mode, const P &p, Create create) { + nrFrontals = 1; + minNegLogConstant = std::numeric_limits::infinity(); + + std::vector fvs; + std::vector gcs; + for (const auto &[mean, sigma] : p) { + auto c = create(mean, sigma); + double value = c->negLogConstant(); + minNegLogConstant = std::min(minNegLogConstant, value); + fvs.push_back({c, value}); + gcs.push_back(c); + } + + conditionals = Conditionals({mode}, gcs); + pairs = FactorValuePairs({mode}, fvs); + } + + // Constructors for different types of GaussianConditionals: + + Helper(const DiscreteKey &mode, Key x0, const P &p) { + initialize(mode, p, [x0](const Vector &mean, double sigma) { + return GC::sharedMeanAndStddev(x0, mean, sigma); + }); + } + + Helper(const DiscreteKey &mode, Key x0, const Matrix &A, Key x1, const P &p) { + initialize(mode, p, [x0, A, x1](const Vector &mean, double sigma) { + return GC::sharedMeanAndStddev(x0, A, x1, mean, sigma); + }); + } + + Helper(const DiscreteKey &mode, Key x0, // + const Matrix &A1, Key x1, const Matrix &A2, Key x2, const P &p) { + initialize(mode, p, [x0, A1, x1, A2, x2](const Vector &mean, double sigma) { + return GC::sharedMeanAndStddev(x0, A1, x1, A2, x2, mean, sigma); + }); + } + /// Construct from tree of GaussianConditionals. 
Helper(const Conditionals &conditionals) : conditionals(conditionals), minNegLogConstant(std::numeric_limits::infinity()) { - auto func = [this](const GaussianConditional::shared_ptr &c) - -> GaussianFactorValuePair { + auto func = [this](const GC::shared_ptr &c) -> GaussianFactorValuePair { double value = 0.0; if (c) { if (!nrFrontals.has_value()) { @@ -61,64 +104,6 @@ struct HybridGaussianConditional::Helper { "HybridGaussianConditional: need at least one frontal variable."); } } - - /// Construct from means and a sigmas. - Helper(const DiscreteKey mode, Key key, - const std::vector> ¶meters) - : nrFrontals(1), - minNegLogConstant(std::numeric_limits::infinity()) { - std::vector gcs; - std::vector fvs; - for (const auto &[mean, sigma] : parameters) { - auto c = GaussianConditional::sharedMeanAndStddev(key, mean, sigma); - double value = c->negLogConstant(); - minNegLogConstant = std::min(minNegLogConstant, value); - gcs.push_back(c); - fvs.push_back({c, value}); - } - conditionals = Conditionals({mode}, gcs); - pairs = FactorValuePairs({mode}, fvs); - } - - /// Construct from means and a sigmas. - Helper(const DiscreteKey mode, Key key, // - const Matrix &A, Key parent, - const std::vector> ¶meters) - : nrFrontals(1), - minNegLogConstant(std::numeric_limits::infinity()) { - std::vector gcs; - std::vector fvs; - for (const auto &[mean, sigma] : parameters) { - auto c = - GaussianConditional::sharedMeanAndStddev(key, A, parent, mean, sigma); - double value = c->negLogConstant(); - minNegLogConstant = std::min(minNegLogConstant, value); - gcs.push_back(c); - fvs.push_back({c, value}); - } - conditionals = Conditionals({mode}, gcs); - pairs = FactorValuePairs({mode}, fvs); - } - - /// Construct from means and a sigmas. 
- Helper(const DiscreteKey mode, Key key, // - const Matrix &A1, Key parent1, const Matrix &A2, Key parent2, - const std::vector> ¶meters) - : nrFrontals(1), - minNegLogConstant(std::numeric_limits::infinity()) { - std::vector gcs; - std::vector fvs; - for (const auto &[mean, sigma] : parameters) { - auto c = GaussianConditional::sharedMeanAndStddev(key, A1, parent1, A2, - parent2, mean, sigma); - double value = c->negLogConstant(); - minNegLogConstant = std::min(minNegLogConstant, value); - gcs.push_back(c); - fvs.push_back({c, value}); - } - conditionals = Conditionals({mode}, gcs); - pairs = FactorValuePairs({mode}, fvs); - } }; /* *******************************************************************************/ From a25a0a923bebb50c531f725df0ff732d8c9db244 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 29 Sep 2024 00:13:08 -0700 Subject: [PATCH 379/398] perfect forwarding magic --- gtsam/hybrid/HybridGaussianConditional.cpp | 45 ++++++---------------- 1 file changed, 12 insertions(+), 33 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 068bd2e5d..c348b85f0 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -26,11 +26,10 @@ #include #include #include +#include #include -#include "gtsam/linear/JacobianFactor.h" - namespace gtsam { /* *******************************************************************************/ struct HybridGaussianConditional::Helper { @@ -42,47 +41,27 @@ struct HybridGaussianConditional::Helper { using GC = GaussianConditional; using P = std::vector>; - // Common code for three constructors below: - template - void initialize(const DiscreteKey &mode, const P &p, Create create) { + /// Construct from a vector of mean and sigma pairs, plus extra args. 
+ template + Helper(const DiscreteKey &mode, const P &p, Args &&...args) { nrFrontals = 1; minNegLogConstant = std::numeric_limits::infinity(); std::vector fvs; std::vector gcs; for (const auto &[mean, sigma] : p) { - auto c = create(mean, sigma); - double value = c->negLogConstant(); + auto gaussianConditional = + GC::sharedMeanAndStddev(std::forward(args)..., mean, sigma); + double value = gaussianConditional->negLogConstant(); minNegLogConstant = std::min(minNegLogConstant, value); - fvs.push_back({c, value}); - gcs.push_back(c); + fvs.push_back({gaussianConditional, value}); + gcs.push_back(gaussianConditional); } conditionals = Conditionals({mode}, gcs); pairs = FactorValuePairs({mode}, fvs); } - // Constructors for different types of GaussianConditionals: - - Helper(const DiscreteKey &mode, Key x0, const P &p) { - initialize(mode, p, [x0](const Vector &mean, double sigma) { - return GC::sharedMeanAndStddev(x0, mean, sigma); - }); - } - - Helper(const DiscreteKey &mode, Key x0, const Matrix &A, Key x1, const P &p) { - initialize(mode, p, [x0, A, x1](const Vector &mean, double sigma) { - return GC::sharedMeanAndStddev(x0, A, x1, mean, sigma); - }); - } - - Helper(const DiscreteKey &mode, Key x0, // - const Matrix &A1, Key x1, const Matrix &A2, Key x2, const P &p) { - initialize(mode, p, [x0, A1, x1, A2, x2](const Vector &mean, double sigma) { - return GC::sharedMeanAndStddev(x0, A1, x1, A2, x2, mean, sigma); - }); - } - /// Construct from tree of GaussianConditionals. 
Helper(const Conditionals &conditionals) : conditionals(conditionals), @@ -124,14 +103,14 @@ HybridGaussianConditional::HybridGaussianConditional( const DiscreteKey mode, Key key, // const std::vector> ¶meters) : HybridGaussianConditional(DiscreteKeys{mode}, - Helper(mode, key, parameters)) {} + Helper(mode, parameters, key)) {} HybridGaussianConditional::HybridGaussianConditional( const DiscreteKey mode, Key key, // const Matrix &A, Key parent, const std::vector> ¶meters) : HybridGaussianConditional(DiscreteKeys{mode}, - Helper(mode, key, A, parent, parameters)) {} + Helper(mode, parameters, key, A, parent)) {} HybridGaussianConditional::HybridGaussianConditional( const DiscreteKey mode, Key key, // @@ -139,7 +118,7 @@ HybridGaussianConditional::HybridGaussianConditional( const std::vector> ¶meters) : HybridGaussianConditional( DiscreteKeys{mode}, - Helper(mode, key, A1, parent1, A2, parent2, parameters)) {} + Helper(mode, parameters, key, A1, parent1, A2, parent2)) {} HybridGaussianConditional::HybridGaussianConditional( const DiscreteKeys &discreteParents, From 4fa192248de37473fbc4a7e8ba8c9549bfa7c590 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 29 Sep 2024 00:21:40 -0700 Subject: [PATCH 380/398] Address o1 review comments --- gtsam/hybrid/HybridGaussianConditional.cpp | 17 ++++++++++------- gtsam/hybrid/HybridGaussianConditional.h | 6 +++--- 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index c348b85f0..b7c79a5b2 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -43,18 +43,20 @@ struct HybridGaussianConditional::Helper { /// Construct from a vector of mean and sigma pairs, plus extra args. 
template - Helper(const DiscreteKey &mode, const P &p, Args &&...args) { + explicit Helper(const DiscreteKey &mode, const P &p, Args &&...args) { nrFrontals = 1; minNegLogConstant = std::numeric_limits::infinity(); std::vector fvs; std::vector gcs; + fvs.reserve(p.size()); + gcs.reserve(p.size()); for (const auto &[mean, sigma] : p) { auto gaussianConditional = GC::sharedMeanAndStddev(std::forward(args)..., mean, sigma); double value = gaussianConditional->negLogConstant(); minNegLogConstant = std::min(minNegLogConstant, value); - fvs.push_back({gaussianConditional, value}); + fvs.emplace_back(gaussianConditional, value); gcs.push_back(gaussianConditional); } @@ -63,7 +65,7 @@ struct HybridGaussianConditional::Helper { } /// Construct from tree of GaussianConditionals. - Helper(const Conditionals &conditionals) + explicit Helper(const Conditionals &conditionals) : conditionals(conditionals), minNegLogConstant(std::numeric_limits::infinity()) { auto func = [this](const GC::shared_ptr &c) -> GaussianFactorValuePair { @@ -80,7 +82,8 @@ struct HybridGaussianConditional::Helper { pairs = FactorValuePairs(conditionals, func); if (!nrFrontals.has_value()) { throw std::runtime_error( - "HybridGaussianConditional: need at least one frontal variable."); + "HybridGaussianConditional: need at least one frontal variable. 
" + "Provided conditionals do not contain any frontal variables."); } } }; @@ -100,20 +103,20 @@ HybridGaussianConditional::HybridGaussianConditional( Conditionals({mode}, conditionals)) {} HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKey mode, Key key, // + const DiscreteKey &mode, Key key, // const std::vector> ¶meters) : HybridGaussianConditional(DiscreteKeys{mode}, Helper(mode, parameters, key)) {} HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKey mode, Key key, // + const DiscreteKey &mode, Key key, // const Matrix &A, Key parent, const std::vector> ¶meters) : HybridGaussianConditional(DiscreteKeys{mode}, Helper(mode, parameters, key, A, parent)) {} HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKey mode, Key key, // + const DiscreteKey &mode, Key key, // const Matrix &A1, Key parent1, const Matrix &A2, Key parent2, const std::vector> ¶meters) : HybridGaussianConditional( diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 7c14f0008..16fc0500f 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -89,18 +89,18 @@ class GTSAM_EXPORT HybridGaussianConditional /// Construct from mean `mu_i` and `sigma_i`. HybridGaussianConditional( - const DiscreteKey mode, Key key, // + const DiscreteKey &mode, Key key, // const std::vector> ¶meters); /// Construct from conditional mean `A1 p1 + b_i` and `sigma_i`. HybridGaussianConditional( - const DiscreteKey mode, Key key, // + const DiscreteKey &mode, Key key, // const Matrix &A, Key parent, const std::vector> ¶meters); /// Construct from conditional mean `A1 p1 + A2 p2 + b_i` and `sigma_i`. 
HybridGaussianConditional( - const DiscreteKey mode, Key key, // + const DiscreteKey &mode, Key key, // const Matrix &A1, Key parent1, const Matrix &A2, Key parent2, const std::vector> ¶meters); From cc79d0432dbf1278800680687fd12aa368c92784 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 29 Sep 2024 00:34:31 -0700 Subject: [PATCH 381/398] Doxygen --- gtsam/hybrid/HybridGaussianConditional.h | 38 ++++++++++++++++++++---- 1 file changed, 32 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 16fc0500f..bd7849d33 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -87,18 +87,44 @@ class GTSAM_EXPORT HybridGaussianConditional const DiscreteKey &mode, const std::vector &conditionals); - /// Construct from mean `mu_i` and `sigma_i`. + /** + * @brief Constructs a HybridGaussianConditional with means mu_i and + * standard deviations sigma_i. + * + * @param mode The discrete mode key. + * @param key The key for this conditional variable. + * @param parameters A vector of pairs (mu_i, sigma_i). + */ HybridGaussianConditional( - const DiscreteKey &mode, Key key, // + const DiscreteKey &mode, Key key, const std::vector> ¶meters); - /// Construct from conditional mean `A1 p1 + b_i` and `sigma_i`. + /** + * @brief Constructs a HybridGaussianConditional with conditional means + * A × parent + b_i and standard deviations sigma_i. + * + * @param mode The discrete mode key. + * @param key The key for this conditional variable. + * @param A The matrix A. + * @param parent The key of the parent variable. + * @param parameters A vector of pairs (b_i, sigma_i). + */ HybridGaussianConditional( - const DiscreteKey &mode, Key key, // - const Matrix &A, Key parent, + const DiscreteKey &mode, Key key, const Matrix &A, Key parent, const std::vector> ¶meters); - /// Construct from conditional mean `A1 p1 + A2 p2 + b_i` and `sigma_i`. 
+ /** + * @brief Constructs a HybridGaussianConditional with conditional means + * A1 × parent1 + A2 × parent2 + b_i and standard deviations sigma_i. + * + * @param mode The discrete mode key. + * @param key The key for this conditional variable. + * @param A1 The first matrix. + * @param parent1 The key of the first parent variable. + * @param A2 The second matrix. + * @param parent2 The key of the second parent variable. + * @param parameters A vector of pairs (b_i, sigma_i). + */ HybridGaussianConditional( const DiscreteKey &mode, Key key, // const Matrix &A1, Key parent1, const Matrix &A2, Key parent2, From cfebf0cca009ddc44b4a9d980725c98262375419 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 29 Sep 2024 10:26:14 -0400 Subject: [PATCH 382/398] formatting --- gtsam/hybrid/HybridGaussianConditional.cpp | 5 +++++ gtsam/hybrid/HybridGaussianFactorGraph.cpp | 1 + 2 files changed, 6 insertions(+) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index b7c79a5b2..e8d2ec35f 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -96,18 +96,21 @@ HybridGaussianConditional::HybridGaussianConditional( conditionals_(helper.conditionals), negLogConstant_(helper.minNegLogConstant) {} +/* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( const DiscreteKey &mode, const std::vector &conditionals) : HybridGaussianConditional(DiscreteKeys{mode}, Conditionals({mode}, conditionals)) {} +/* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( const DiscreteKey &mode, Key key, // const std::vector> ¶meters) : HybridGaussianConditional(DiscreteKeys{mode}, Helper(mode, parameters, key)) {} +/* *******************************************************************************/ 
HybridGaussianConditional::HybridGaussianConditional( const DiscreteKey &mode, Key key, // const Matrix &A, Key parent, @@ -115,6 +118,7 @@ HybridGaussianConditional::HybridGaussianConditional( : HybridGaussianConditional(DiscreteKeys{mode}, Helper(mode, parameters, key, A, parent)) {} +/* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( const DiscreteKey &mode, Key key, // const Matrix &A1, Key parent1, const Matrix &A2, Key parent2, @@ -123,6 +127,7 @@ HybridGaussianConditional::HybridGaussianConditional( DiscreteKeys{mode}, Helper(mode, parameters, key, A1, parent1, A2, parent2)) {} +/* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( const DiscreteKeys &discreteParents, const HybridGaussianConditional::Conditionals &conditionals) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 787c0edd7..c107aa8a8 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -366,6 +366,7 @@ static std::shared_ptr createHybridGaussianFactor( return std::make_shared(discreteSeparator, newFactors); } +/* *******************************************************************************/ std::pair> HybridGaussianFactorGraph::eliminate(const Ordering &keys) const { // Since we eliminate all continuous variables first, From 3d8603b23b220e3af9e9845393ec9b88809f1870 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 29 Sep 2024 10:27:15 -0400 Subject: [PATCH 383/398] remove unnecessary includes --- gtsam/hybrid/tests/Switching.h | 7 +++---- gtsam/hybrid/tests/testHybridBayesNet.cpp | 1 - gtsam/hybrid/tests/testHybridEstimation.cpp | 1 - gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 1 - gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp | 1 - gtsam/hybrid/tests/testHybridNonlinearISAM.cpp | 1 - 6 files 
changed, 3 insertions(+), 9 deletions(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 90137a4e3..82876fd2c 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -24,17 +24,16 @@ #include #include #include +#include +#include #include #include +#include #include #include #include -#include "gtsam/linear/GaussianFactor.h" -#include "gtsam/linear/GaussianFactorGraph.h" -#include "gtsam/nonlinear/NonlinearFactor.h" - #pragma once namespace gtsam { diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index c33ff8454..b555f6bd9 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -21,7 +21,6 @@ #include #include #include -#include #include #include "Switching.h" diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 25d697451..58decc695 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -30,7 +30,6 @@ #include #include #include -#include #include #include #include diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index 1c72268de..70fa321ad 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -24,7 +24,6 @@ #include #include #include -#include #include #include diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index b5f04bb99..4735c1657 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -28,7 +28,6 @@ #include #include #include -#include #include #include #include diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index 2452ebe6d..b804a176b 100644 --- 
a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -24,7 +24,6 @@ #include #include #include -#include #include #include From 2cf21007104fff0969722e0440b56cee1967d53c Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 29 Sep 2024 09:40:30 -0700 Subject: [PATCH 384/398] address review comments --- gtsam/hybrid/HybridGaussianConditional.cpp | 32 ++++++++++------------ gtsam/hybrid/HybridGaussianConditional.h | 16 +++++------ gtsam/hybrid/tests/testGaussianMixture.cpp | 22 ++++++++------- 3 files changed, 34 insertions(+), 36 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index e8d2ec35f..1712e06a9 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -51,7 +51,7 @@ struct HybridGaussianConditional::Helper { std::vector gcs; fvs.reserve(p.size()); gcs.reserve(p.size()); - for (const auto &[mean, sigma] : p) { + for (auto &&[mean, sigma] : p) { auto gaussianConditional = GC::sharedMeanAndStddev(std::forward(args)..., mean, sigma); double value = gaussianConditional->negLogConstant(); @@ -96,38 +96,34 @@ HybridGaussianConditional::HybridGaussianConditional( conditionals_(helper.conditionals), negLogConstant_(helper.minNegLogConstant) {} -/* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKey &mode, + const DiscreteKey &discreteParent, const std::vector &conditionals) - : HybridGaussianConditional(DiscreteKeys{mode}, - Conditionals({mode}, conditionals)) {} + : HybridGaussianConditional(DiscreteKeys{discreteParent}, + Conditionals({discreteParent}, conditionals)) {} -/* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKey &mode, Key key, // + const DiscreteKey &discreteParent, Key key, // 
const std::vector> ¶meters) - : HybridGaussianConditional(DiscreteKeys{mode}, - Helper(mode, parameters, key)) {} + : HybridGaussianConditional(DiscreteKeys{discreteParent}, + Helper(discreteParent, parameters, key)) {} -/* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKey &mode, Key key, // + const DiscreteKey &discreteParent, Key key, // const Matrix &A, Key parent, const std::vector> ¶meters) - : HybridGaussianConditional(DiscreteKeys{mode}, - Helper(mode, parameters, key, A, parent)) {} + : HybridGaussianConditional( + DiscreteKeys{discreteParent}, + Helper(discreteParent, parameters, key, A, parent)) {} -/* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKey &mode, Key key, // + const DiscreteKey &discreteParent, Key key, // const Matrix &A1, Key parent1, const Matrix &A2, Key parent2, const std::vector> ¶meters) : HybridGaussianConditional( - DiscreteKeys{mode}, - Helper(mode, parameters, key, A1, parent1, A2, parent2)) {} + DiscreteKeys{discreteParent}, + Helper(discreteParent, parameters, key, A1, parent1, A2, parent2)) {} -/* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( const DiscreteKeys &discreteParents, const HybridGaussianConditional::Conditionals &conditionals) diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index bd7849d33..827b7f309 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -79,45 +79,45 @@ class GTSAM_EXPORT HybridGaussianConditional /** * @brief Construct from one discrete key and vector of conditionals. 
* - * @param mode Single discrete parent variable + * @param discreteParent Single discrete parent variable * @param conditionals Vector of conditionals with the same size as the * cardinality of the discrete parent. */ HybridGaussianConditional( - const DiscreteKey &mode, + const DiscreteKey &discreteParent, const std::vector &conditionals); /** * @brief Constructs a HybridGaussianConditional with means mu_i and * standard deviations sigma_i. * - * @param mode The discrete mode key. + * @param discreteParent The discrete parent or "mode" key. * @param key The key for this conditional variable. * @param parameters A vector of pairs (mu_i, sigma_i). */ HybridGaussianConditional( - const DiscreteKey &mode, Key key, + const DiscreteKey &discreteParent, Key key, const std::vector> ¶meters); /** * @brief Constructs a HybridGaussianConditional with conditional means * A × parent + b_i and standard deviations sigma_i. * - * @param mode The discrete mode key. + * @param discreteParent The discrete parent or "mode" key. * @param key The key for this conditional variable. * @param A The matrix A. * @param parent The key of the parent variable. * @param parameters A vector of pairs (b_i, sigma_i). */ HybridGaussianConditional( - const DiscreteKey &mode, Key key, const Matrix &A, Key parent, + const DiscreteKey &discreteParent, Key key, const Matrix &A, Key parent, const std::vector> ¶meters); /** * @brief Constructs a HybridGaussianConditional with conditional means * A1 × parent1 + A2 × parent2 + b_i and standard deviations sigma_i. * - * @param mode The discrete mode key. + * @param discreteParent The discrete parent or "mode" key. * @param key The key for this conditional variable. * @param A1 The first matrix. * @param parent1 The key of the first parent variable. @@ -126,7 +126,7 @@ class GTSAM_EXPORT HybridGaussianConditional * @param parameters A vector of pairs (b_i, sigma_i). 
*/ HybridGaussianConditional( - const DiscreteKey &mode, Key key, // + const DiscreteKey &discreteParent, Key key, // const Matrix &A1, Key parent1, const Matrix &A2, Key parent2, const std::vector> ¶meters); diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index d6eac9c38..3256f5686 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -79,15 +79,16 @@ TEST(GaussianMixture, GaussianMixtureModel) { double mu0 = 1.0, mu1 = 3.0; double sigma = 2.0; - HybridBayesNet hbn; + // Create a Gaussian mixture model p(z|m) with same sigma. + HybridBayesNet gmm; std::vector> parameters{{Vector1(mu0), sigma}, {Vector1(mu1), sigma}}; - hbn.emplace_shared(m, Z(0), parameters); - hbn.push_back(mixing); + gmm.emplace_shared(m, Z(0), parameters); + gmm.push_back(mixing); // At the halfway point between the means, we should get P(m|z)=0.5 double midway = mu1 - mu0; - auto pMid = SolveHBN(hbn, midway); + auto pMid = SolveHBN(gmm, midway); EXPECT(assert_equal(DiscreteConditional(m, "60/40"), pMid)); // Everywhere else, the result should be a sigmoid. @@ -96,7 +97,7 @@ TEST(GaussianMixture, GaussianMixtureModel) { const double expected = prob_m_z(mu0, mu1, sigma, sigma, z); // Workflow 1: convert HBN to HFG and solve - auto posterior1 = SolveHBN(hbn, z); + auto posterior1 = SolveHBN(gmm, z); EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8); // Workflow 2: directly specify HFG and solve @@ -117,16 +118,17 @@ TEST(GaussianMixture, GaussianMixtureModel2) { double mu0 = 1.0, mu1 = 3.0; double sigma0 = 8.0, sigma1 = 4.0; - HybridBayesNet hbn; + // Create a Gaussian mixture model p(z|m) with same sigma. 
+ HybridBayesNet gmm; std::vector> parameters{{Vector1(mu0), sigma0}, {Vector1(mu1), sigma1}}; - hbn.emplace_shared(m, Z(0), parameters); - hbn.push_back(mixing); + gmm.emplace_shared(m, Z(0), parameters); + gmm.push_back(mixing); // We get zMax=3.1333 by finding the maximum value of the function, at which // point the mode m==1 is about twice as probable as m==0. double zMax = 3.133; - auto pMax = SolveHBN(hbn, zMax); + auto pMax = SolveHBN(gmm, zMax); EXPECT(assert_equal(DiscreteConditional(m, "42/58"), pMax, 1e-4)); // Everywhere else, the result should be a bell curve like function. @@ -135,7 +137,7 @@ TEST(GaussianMixture, GaussianMixtureModel2) { const double expected = prob_m_z(mu0, mu1, sigma0, sigma1, z); // Workflow 1: convert HBN to HFG and solve - auto posterior1 = SolveHBN(hbn, z); + auto posterior1 = SolveHBN(gmm, z); EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8); // Workflow 2: directly specify HFG and solve From dc2c98383738b977b986caacd7468b85dc5d6890 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 29 Sep 2024 11:53:29 -0700 Subject: [PATCH 385/398] Document and fix bug in modes --- gtsam/hybrid/tests/Switching.h | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 82876fd2c..547facce9 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -114,11 +114,11 @@ inline std::pair> makeBinaryOrdering( return {new_order, levels}; } -/* *************************************************************************** - */ +/* ****************************************************************************/ using MotionModel = BetweenFactor; // Test fixture with switching network. +/// ϕ(X(0)) .. ϕ(X(k),X(k+1)) .. ϕ(X(k);z_k) .. ϕ(M(0)) .. 
ϕ(M(k),M(k+1)) struct Switching { size_t K; DiscreteKeys modes; @@ -140,8 +140,8 @@ struct Switching { : K(K) { using noiseModel::Isotropic; - // Create DiscreteKeys for binary K modes. - for (size_t k = 0; k < K; k++) { + // Create DiscreteKeys for K-1 binary modes. + for (size_t k = 0; k < K - 1; k++) { modes.emplace_back(M(k), 2); } @@ -153,25 +153,26 @@ struct Switching { } // Create hybrid factor graph. - // Add a prior on X(0). + + // Add a prior ϕ(X(0)) on X(0). nonlinearFactorGraph.emplace_shared>( X(0), measurements.at(0), Isotropic::Sigma(1, prior_sigma)); - // Add "motion models". + // Add "motion models" ϕ(X(k),X(k+1)). for (size_t k = 0; k < K - 1; k++) { auto motion_models = motionModels(k, between_sigma); nonlinearFactorGraph.emplace_shared(modes[k], motion_models); } - // Add measurement factors + // Add measurement factors ϕ(X(k);z_k). auto measurement_noise = Isotropic::Sigma(1, prior_sigma); for (size_t k = 1; k < K; k++) { nonlinearFactorGraph.emplace_shared>( X(k), measurements.at(k), measurement_noise); } - // Add "mode chain" + // Add "mode chain" ϕ(M(0)) ϕ(M(0),M(1)) ... ϕ(M(K-3),M(K-2)) addModeChain(&nonlinearFactorGraph, discrete_transition_prob); // Create the linearization point. @@ -179,8 +180,6 @@ struct Switching { linearizationPoint.insert(X(k), static_cast(k + 1)); } - // The ground truth is robot moving forward - // and one less than the linearization point linearizedFactorGraph = *nonlinearFactorGraph.linearize(linearizationPoint); } @@ -196,7 +195,7 @@ struct Switching { } /** - * @brief Add "mode chain" to HybridNonlinearFactorGraph from M(0) to M(K-2). + * @brief Add "mode chain" to HybridNonlinearFactorGraph from M(0) to M(K-1). * E.g. if K=4, we want M0, M1 and M2. * * @param fg The factor graph to which the mode chain is added. 
From 28a2cd347514a79c3cc9c6044fa451bd959832eb Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 29 Sep 2024 13:25:34 -0700 Subject: [PATCH 386/398] Check invariants --- gtsam/hybrid/tests/testHybridGaussianConditional.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index 02163df9e..803d42f03 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -168,6 +168,9 @@ TEST(HybridGaussianConditional, ContinuousParents) { // Check that the continuous parent keys are correct: EXPECT(continuousParentKeys.size() == 1); EXPECT(continuousParentKeys[0] == X(0)); + + EXPECT(HybridGaussianConditional::CheckInvariants(hybrid_conditional, hv0)); + EXPECT(HybridGaussianConditional::CheckInvariants(hybrid_conditional, hv1)); } /* ************************************************************************* */ From 8675dc62df1277f84f6b689a2afbd64df5fb6b5e Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 29 Sep 2024 15:41:17 -0700 Subject: [PATCH 387/398] Throw if sampling conditions not satisfied --- gtsam/discrete/DiscreteConditional.cpp | 18 +++++++++++++----- gtsam/discrete/DiscreteConditional.h | 2 +- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/gtsam/discrete/DiscreteConditional.cpp b/gtsam/discrete/DiscreteConditional.cpp index 3f0c9f511..5ab0c59ec 100644 --- a/gtsam/discrete/DiscreteConditional.cpp +++ b/gtsam/discrete/DiscreteConditional.cpp @@ -259,8 +259,18 @@ size_t DiscreteConditional::argmax(const DiscreteValues& parentsValues) const { /* ************************************************************************** */ void DiscreteConditional::sampleInPlace(DiscreteValues* values) const { - assert(nrFrontals() == 1); - Key j = (firstFrontalKey()); + // throw if more than one frontal: + if (nrFrontals() != 1) { + throw std::invalid_argument( + 
"DiscreteConditional::sampleInPlace can only be called on single " + "variable conditionals"); + } + Key j = firstFrontalKey(); + // throw if values already contains j: + if (values->count(j) > 0) { + throw std::invalid_argument( + "DiscreteConditional::sampleInPlace: values already contains j"); + } size_t sampled = sample(*values); // Sample variable given parents (*values)[j] = sampled; // store result in partial solution } @@ -467,9 +477,7 @@ double DiscreteConditional::evaluate(const HybridValues& x) const { } /* ************************************************************************* */ -double DiscreteConditional::negLogConstant() const { - return 0.0; -} +double DiscreteConditional::negLogConstant() const { return 0.0; } /* ************************************************************************* */ diff --git a/gtsam/discrete/DiscreteConditional.h b/gtsam/discrete/DiscreteConditional.h index e16100d0a..f59e29285 100644 --- a/gtsam/discrete/DiscreteConditional.h +++ b/gtsam/discrete/DiscreteConditional.h @@ -168,7 +168,7 @@ class GTSAM_EXPORT DiscreteConditional static_cast(this)->print(s, formatter); } - /// Evaluate, just look up in AlgebraicDecisonTree + /// Evaluate, just look up in AlgebraicDecisionTree double evaluate(const DiscreteValues& values) const { return ADT::operator()(values); } From 80a4cd1bfca60ce28667f000dce397435a5eaeda Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 29 Sep 2024 15:42:02 -0700 Subject: [PATCH 388/398] Only sample if not provided --- gtsam/discrete/DiscreteBayesNet.cpp | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/gtsam/discrete/DiscreteBayesNet.cpp b/gtsam/discrete/DiscreteBayesNet.cpp index 1c5c81e45..56265b0a4 100644 --- a/gtsam/discrete/DiscreteBayesNet.cpp +++ b/gtsam/discrete/DiscreteBayesNet.cpp @@ -58,7 +58,12 @@ DiscreteValues DiscreteBayesNet::sample(DiscreteValues result) const { // sample each node in turn in topological sort order (parents first) for (auto it = 
std::make_reverse_iterator(end()); it != std::make_reverse_iterator(begin()); ++it) { - (*it)->sampleInPlace(&result); + const DiscreteConditional::shared_ptr& conditional = *it; + // Sample the conditional only if value for j not already in result + const Key j = conditional->firstFrontalKey(); + if (result.count(j) == 0) { + conditional->sampleInPlace(&result); + } } return result; } From 846c7a1a99b7f573dd7f6ec23c9d91c2e63e7f9a Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 29 Sep 2024 22:56:16 -0700 Subject: [PATCH 389/398] Make testable --- gtsam/hybrid/HybridGaussianFactorGraph.h | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 923f48e38..7e3aac663 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -231,4 +231,9 @@ class GTSAM_EXPORT HybridGaussianFactorGraph GaussianFactorGraph operator()(const DiscreteValues& assignment) const; }; +// traits +template <> +struct traits + : public Testable {}; + } // namespace gtsam From 21171b3a9a59926d2bdaf7545f6b9bc56565f62a Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 29 Sep 2024 23:57:37 -0700 Subject: [PATCH 390/398] Fix equality --- gtsam/hybrid/HybridGaussianConditional.cpp | 9 ++++----- gtsam/hybrid/HybridGaussianFactor.cpp | 7 +++---- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 1712e06a9..fb89f72fc 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -192,11 +192,10 @@ bool HybridGaussianConditional::equals(const HybridFactor &lf, // Check the base and the factors: return BaseFactor::equals(*e, tol) && - conditionals_.equals(e->conditionals_, - [tol](const GaussianConditional::shared_ptr &f1, - const GaussianConditional::shared_ptr &f2) { - return f1->equals(*(f2), tol); - }); + 
conditionals_.equals( + e->conditionals_, [tol](const auto &f1, const auto &f2) { + return (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol)); + }); } /* *******************************************************************************/ diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 7f8e808bf..6a9fc5c35 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -154,10 +154,9 @@ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { // Check the base and the factors: return Base::equals(*e, tol) && - factors_.equals(e->factors_, - [tol](const sharedFactor &f1, const sharedFactor &f2) { - return f1->equals(*f2, tol); - }); + factors_.equals(e->factors_, [tol](const auto &f1, const auto &f2) { + return (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol)); + }); } /* *******************************************************************************/ From d042359a997949255a4ddd2e34e1d7d01bd8eba7 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 29 Sep 2024 23:57:48 -0700 Subject: [PATCH 391/398] choose method --- gtsam/hybrid/HybridGaussianConditional.cpp | 2 +- gtsam/hybrid/HybridGaussianConditional.h | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index fb89f72fc..1db13e95b 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -168,7 +168,7 @@ size_t HybridGaussianConditional::nrComponents() const { } /* *******************************************************************************/ -GaussianConditional::shared_ptr HybridGaussianConditional::operator()( +GaussianConditional::shared_ptr HybridGaussianConditional::choose( const DiscreteValues &discreteValues) const { auto &ptr = conditionals_(discreteValues); if (!ptr) return nullptr; diff --git a/gtsam/hybrid/HybridGaussianConditional.h 
b/gtsam/hybrid/HybridGaussianConditional.h index 827b7f309..68c63e7bd 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -159,9 +159,15 @@ class GTSAM_EXPORT HybridGaussianConditional /// @{ /// @brief Return the conditional Gaussian for the given discrete assignment. - GaussianConditional::shared_ptr operator()( + GaussianConditional::shared_ptr choose( const DiscreteValues &discreteValues) const; + /// @brief Syntactic sugar for choose. + GaussianConditional::shared_ptr operator()( + const DiscreteValues &discreteValues) const { + return choose(discreteValues); + } + /// Returns the total number of continuous components size_t nrComponents() const; From 5b909e3c2918a74228d5ee3ad50f7d4afe13cbd6 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 29 Sep 2024 23:58:21 -0700 Subject: [PATCH 392/398] Clarify choose method --- gtsam/hybrid/HybridBayesNet.cpp | 2 +- gtsam/hybrid/HybridBayesNet.h | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 6e96afb25..3c77e3f9a 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -206,7 +206,7 @@ GaussianBayesNet HybridBayesNet::choose( for (auto &&conditional : *this) { if (auto gm = conditional->asHybrid()) { // If conditional is hybrid, select based on assignment. - gbn.push_back((*gm)(assignment)); + gbn.push_back(gm->choose(assignment)); } else if (auto gc = conditional->asGaussian()) { // If continuous only, add Gaussian conditional. gbn.push_back(gc); diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 22a43f3bd..62688e8b2 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -127,6 +127,8 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { * @brief Get the Gaussian Bayes Net which corresponds to a specific discrete * value assignment. * + * @note Any pure discrete factors are ignored. 
+ * * @param assignment The discrete value assignment for the discrete keys. * @return GaussianBayesNet */ From 12349b9201d2fbdce6e245d8acd0da5acf85f24a Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 30 Sep 2024 00:32:06 -0700 Subject: [PATCH 393/398] Helper method can be static --- gtsam/hybrid/HybridGaussianFactor.cpp | 17 ++++++++--------- gtsam/hybrid/HybridGaussianFactor.h | 4 ---- 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 6a9fc5c35..b04db4977 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -212,16 +212,15 @@ GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree() } /* *******************************************************************************/ -double HybridGaussianFactor::potentiallyPrunedComponentError( - const sharedFactor &gf, const VectorValues &values) const { +/// Helper method to compute the error of a component. +static double PotentiallyPrunedComponentError( + const GaussianFactor::shared_ptr &gf, const VectorValues &values) { // Check if valid pointer if (gf) { return gf->error(values); } else { - // If not valid, pointer, it means this component was pruned, - // so we return maximum error. - // This way the negative exponential will give - // a probability value close to 0.0. + // If nullptr this component was pruned, so we return maximum error. This + // way the negative exponential will give a probability value close to 0.0. return std::numeric_limits::max(); } } @@ -230,8 +229,8 @@ double HybridGaussianFactor::potentiallyPrunedComponentError( AlgebraicDecisionTree HybridGaussianFactor::errorTree( const VectorValues &continuousValues) const { // functor to convert from sharedFactor to double error value. 
- auto errorFunc = [this, &continuousValues](const sharedFactor &gf) { - return this->potentiallyPrunedComponentError(gf, continuousValues); + auto errorFunc = [&continuousValues](const sharedFactor &gf) { + return PotentiallyPrunedComponentError(gf, continuousValues); }; DecisionTree error_tree(factors_, errorFunc); return error_tree; @@ -241,7 +240,7 @@ AlgebraicDecisionTree HybridGaussianFactor::errorTree( double HybridGaussianFactor::error(const HybridValues &values) const { // Directly index to get the component, no need to build the whole tree. const sharedFactor gf = factors_(values.discrete()); - return potentiallyPrunedComponentError(gf, values.continuous()); + return PotentiallyPrunedComponentError(gf, values.continuous()); } } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index f23d065b6..e5a575409 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -189,10 +189,6 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { */ static Factors augment(const FactorValuePairs &factors); - /// Helper method to compute the error of a component. - double potentiallyPrunedComponentError( - const sharedFactor &gf, const VectorValues &continuousValues) const; - /// Helper struct to assist private constructor below. 
struct ConstructorHelper; From 44fb786b7aac9227424c785465e98dda037f18f6 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 30 Sep 2024 01:38:06 -0700 Subject: [PATCH 394/398] Much more comprehensive tests --- gtsam/hybrid/tests/testHybridBayesNet.cpp | 118 +++++++++++++++++++--- 1 file changed, 102 insertions(+), 16 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index b555f6bd9..79979ac83 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -62,32 +62,117 @@ TEST(HybridBayesNet, Add) { } /* ****************************************************************************/ -// Test evaluate for a pure discrete Bayes net P(Asia). +// Test API for a pure discrete Bayes net P(Asia). TEST(HybridBayesNet, EvaluatePureDiscrete) { HybridBayesNet bayesNet; - bayesNet.emplace_shared(Asia, "4/6"); - HybridValues values; - values.insert(asiaKey, 0); - EXPECT_DOUBLES_EQUAL(0.4, bayesNet.evaluate(values), 1e-9); + const auto pAsia = std::make_shared(Asia, "4/6"); + bayesNet.push_back(pAsia); + HybridValues zero{{}, {{asiaKey, 0}}}, one{{}, {{asiaKey, 1}}}; + + // choose + GaussianBayesNet empty; + EXPECT(assert_equal(empty, bayesNet.choose(zero.discrete()), 1e-9)); + + // evaluate + EXPECT_DOUBLES_EQUAL(0.4, bayesNet.evaluate(zero), 1e-9); + EXPECT_DOUBLES_EQUAL(0.4, bayesNet(zero), 1e-9); + + // optimize + EXPECT(assert_equal(one, bayesNet.optimize())); + EXPECT(assert_equal(VectorValues{}, bayesNet.optimize(one.discrete()))); + + // sample + std::mt19937_64 rng(42); + EXPECT(assert_equal(zero, bayesNet.sample(&rng))); + EXPECT(assert_equal(one, bayesNet.sample(one, &rng))); + EXPECT(assert_equal(zero, bayesNet.sample(zero, &rng))); + + // error + EXPECT_DOUBLES_EQUAL(-log(0.4), bayesNet.error(zero), 1e-9); + EXPECT_DOUBLES_EQUAL(-log(0.6), bayesNet.error(one), 1e-9); + + // logProbability + EXPECT_DOUBLES_EQUAL(log(0.4), bayesNet.logProbability(zero), 1e-9); + 
EXPECT_DOUBLES_EQUAL(log(0.6), bayesNet.logProbability(one), 1e-9); + + // toFactorGraph + HybridGaussianFactorGraph expectedFG{pAsia}, fg = bayesNet.toFactorGraph({}); + EXPECT(assert_equal(expectedFG, fg)); + + // prune, imperative :-( + EXPECT(assert_equal(bayesNet, bayesNet.prune(2))); + EXPECT_LONGS_EQUAL(1, bayesNet.prune(1).at(0)->size()); } /* ****************************************************************************/ // Test creation of a tiny hybrid Bayes net. TEST(HybridBayesNet, Tiny) { - auto bn = tiny::createHybridBayesNet(); - EXPECT_LONGS_EQUAL(3, bn.size()); + auto bayesNet = tiny::createHybridBayesNet(); // P(z|x,mode)P(x)P(mode) + EXPECT_LONGS_EQUAL(3, bayesNet.size()); const VectorValues vv{{Z(0), Vector1(5.0)}, {X(0), Vector1(5.0)}}; - auto fg = bn.toFactorGraph(vv); + HybridValues zero{vv, {{M(0), 0}}}, one{vv, {{M(0), 1}}}; + + // Check Invariants for components + HybridGaussianConditional::shared_ptr hgc = bayesNet.at(0)->asHybrid(); + GaussianConditional::shared_ptr gc0 = hgc->choose(zero.discrete()), + gc1 = hgc->choose(one.discrete()); + GaussianConditional::shared_ptr px = bayesNet.at(1)->asGaussian(); + GaussianConditional::CheckInvariants(*gc0, vv); + GaussianConditional::CheckInvariants(*gc1, vv); + GaussianConditional::CheckInvariants(*px, vv); + HybridGaussianConditional::CheckInvariants(*hgc, zero); + HybridGaussianConditional::CheckInvariants(*hgc, one); + + // choose + GaussianBayesNet expectedChosen; + expectedChosen.push_back(gc0); + expectedChosen.push_back(px); + auto chosen0 = bayesNet.choose(zero.discrete()); + auto chosen1 = bayesNet.choose(one.discrete()); + EXPECT(assert_equal(expectedChosen, chosen0, 1e-9)); + + // logProbability + const double logP0 = chosen0.logProbability(vv) + log(0.4); // 0.4 is prior + const double logP1 = chosen1.logProbability(vv) + log(0.6); // 0.6 is prior + EXPECT_DOUBLES_EQUAL(logP0, bayesNet.logProbability(zero), 1e-9); + EXPECT_DOUBLES_EQUAL(logP1, bayesNet.logProbability(one), 1e-9); + 
+ // evaluate + EXPECT_DOUBLES_EQUAL(exp(logP0), bayesNet.evaluate(zero), 1e-9); + + // optimize + EXPECT(assert_equal(one, bayesNet.optimize())); + EXPECT(assert_equal(chosen0.optimize(), bayesNet.optimize(zero.discrete()))); + + // sample + std::mt19937_64 rng(42); + EXPECT(assert_equal({{M(0), 1}}, bayesNet.sample(&rng).discrete())); + + // error + const double error0 = chosen0.error(vv) + gc0->negLogConstant() - + px->negLogConstant() - log(0.4); + const double error1 = chosen1.error(vv) + gc1->negLogConstant() - + px->negLogConstant() - log(0.6); + EXPECT_DOUBLES_EQUAL(error0, bayesNet.error(zero), 1e-9); + EXPECT_DOUBLES_EQUAL(error1, bayesNet.error(one), 1e-9); + EXPECT_DOUBLES_EQUAL(error0 + logP0, error1 + logP1, 1e-9); + + // toFactorGraph + auto fg = bayesNet.toFactorGraph({{Z(0), Vector1(5.0)}}); EXPECT_LONGS_EQUAL(3, fg.size()); // Check that the ratio of probPrime to evaluate is the same for all modes. std::vector ratio(2); - for (size_t mode : {0, 1}) { - const HybridValues hv{vv, {{M(0), mode}}}; - ratio[mode] = std::exp(-fg.error(hv)) / bn.evaluate(hv); - } + ratio[0] = std::exp(-fg.error(zero)) / bayesNet.evaluate(zero); + ratio[1] = std::exp(-fg.error(one)) / bayesNet.evaluate(one); EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8); + + // prune, imperative :-( + auto pruned = bayesNet.prune(1); + EXPECT_LONGS_EQUAL(1, pruned.at(0)->asHybrid()->nrComponents()); + EXPECT(!pruned.equals(bayesNet)); + } /* ****************************************************************************/ @@ -223,12 +308,15 @@ TEST(HybridBayesNet, Optimize) { /* ****************************************************************************/ // Test Bayes net error TEST(HybridBayesNet, Pruning) { + // Create switching network with three continuous variables and two discrete: + // ϕ(x0) ϕ(x0,x1,m0) ϕ(x1,x2,m1) ϕ(x0;z0) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0) ϕ(m0,m1) Switching s(3); HybridBayesNet::shared_ptr posterior = s.linearizedFactorGraph.eliminateSequential(); EXPECT_LONGS_EQUAL(5, 
posterior->size()); + // Optimize HybridValues delta = posterior->optimize(); auto actualTree = posterior->evaluate(delta.continuous()); @@ -254,7 +342,6 @@ TEST(HybridBayesNet, Pruning) { logProbability += posterior->at(0)->asHybrid()->logProbability(hybridValues); logProbability += posterior->at(1)->asHybrid()->logProbability(hybridValues); logProbability += posterior->at(2)->asHybrid()->logProbability(hybridValues); - // NOTE(dellaert): the discrete errors were not added in logProbability tree! logProbability += posterior->at(3)->asDiscrete()->logProbability(hybridValues); logProbability += @@ -316,10 +403,9 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) { #endif // regression - DiscreteKeys dkeys{{M(0), 2}, {M(1), 2}, {M(2), 2}}; DecisionTreeFactor::ADT potentials( - dkeys, std::vector{0, 0, 0, 0.505145423, 0, 1, 0, 0.494854577}); - DiscreteConditional expected_discrete_conditionals(1, dkeys, potentials); + s.modes, std::vector{0, 0, 0, 0.505145423, 0, 1, 0, 0.494854577}); + DiscreteConditional expected_discrete_conditionals(1, s.modes, potentials); // Prune! 
posterior->prune(maxNrLeaves); From 3cd816341ccbc2b0b3e617d9723431c1c4775ec9 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 30 Sep 2024 12:49:29 -0700 Subject: [PATCH 395/398] refactor printErrors --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 92 ++++++++-------------- 1 file changed, 34 insertions(+), 58 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index c107aa8a8..8a2a7fd15 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -74,6 +74,32 @@ const Ordering HybridOrdering(const HybridGaussianFactorGraph &graph) { index, KeyVector(discrete_keys.begin(), discrete_keys.end()), true); } +/* ************************************************************************ */ +static void printFactor(const std::shared_ptr &factor, + const DiscreteValues &assignment, + const KeyFormatter &keyFormatter) { + if (auto hgf = std::dynamic_pointer_cast(factor)) { + hgf->operator()(assignment) + ->print("HybridGaussianFactor, component:", keyFormatter); + } else if (auto gf = std::dynamic_pointer_cast(factor)) { + factor->print("GaussianFactor:\n", keyFormatter); + } else if (auto df = std::dynamic_pointer_cast(factor)) { + factor->print("DiscreteFactor:\n", keyFormatter); + } else if (auto hc = std::dynamic_pointer_cast(factor)) { + if (hc->isContinuous()) { + factor->print("GaussianConditional:\n", keyFormatter); + } else if (hc->isDiscrete()) { + factor->print("DiscreteConditional:\n", keyFormatter); + } else { + hc->asHybrid() + ->choose(assignment) + ->print("HybridConditional, component:\n", keyFormatter); + } + } else { + factor->print("Unknown factor type\n", keyFormatter); + } +} + /* ************************************************************************ */ void HybridGaussianFactorGraph::printErrors( const HybridValues &values, const std::string &str, @@ -83,69 +109,19 @@ void HybridGaussianFactorGraph::printErrors( &printCondition) const { 
std::cout << str << "size: " << size() << std::endl << std::endl; - std::stringstream ss; - for (size_t i = 0; i < factors_.size(); i++) { auto &&factor = factors_[i]; - std::cout << "Factor " << i << ": "; - - // Clear the stringstream - ss.str(std::string()); - - if (auto hgf = std::dynamic_pointer_cast(factor)) { - if (factor == nullptr) { - std::cout << "nullptr" - << "\n"; - } else { - hgf->operator()(values.discrete())->print(ss.str(), keyFormatter); - std::cout << "error = " << factor->error(values) << std::endl; - } - } else if (auto hc = std::dynamic_pointer_cast(factor)) { - if (factor == nullptr) { - std::cout << "nullptr" - << "\n"; - } else { - if (hc->isContinuous()) { - factor->print(ss.str(), keyFormatter); - std::cout << "error = " << hc->asGaussian()->error(values) << "\n"; - } else if (hc->isDiscrete()) { - factor->print(ss.str(), keyFormatter); - std::cout << "error = " << hc->asDiscrete()->error(values.discrete()) - << "\n"; - } else { - // Is hybrid - auto conditionalComponent = - hc->asHybrid()->operator()(values.discrete()); - conditionalComponent->print(ss.str(), keyFormatter); - std::cout << "error = " << conditionalComponent->error(values) - << "\n"; - } - } - } else if (auto gf = std::dynamic_pointer_cast(factor)) { - const double errorValue = (factor != nullptr ? 
gf->error(values) : .0); - if (!printCondition(factor.get(), errorValue, i)) - continue; // User-provided filter did not pass - - if (factor == nullptr) { - std::cout << "nullptr" - << "\n"; - } else { - factor->print(ss.str(), keyFormatter); - std::cout << "error = " << errorValue << "\n"; - } - } else if (auto df = std::dynamic_pointer_cast(factor)) { - if (factor == nullptr) { - std::cout << "nullptr" - << "\n"; - } else { - factor->print(ss.str(), keyFormatter); - std::cout << "error = " << df->error(values.discrete()) << std::endl; - } - - } else { + if (factor == nullptr) { + std::cout << "Factor " << i << ": nullptr\n"; continue; } + const double errorValue = factor->error(values); + if (!printCondition(factor.get(), errorValue, i)) + continue; // User-provided filter did not pass + // Print the factor + std::cout << "Factor " << i << ", error = " << errorValue << "\n"; + printFactor(factor, values.discrete(), keyFormatter); std::cout << "\n"; } std::cout.flush(); From d2f1894bacf584c49e62691769853f6adcff5084 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 1 Oct 2024 14:26:11 -0400 Subject: [PATCH 396/398] implement errorTree for HybridNonlinearFactorGraph --- gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 31 +++++++++++++++++++++ gtsam/hybrid/HybridNonlinearFactorGraph.h | 13 +++++++++ 2 files changed, 44 insertions(+) diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index 6d19ef156..0e7e9c692 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -179,4 +179,35 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize( return linearFG; } +/* ************************************************************************* */ +AlgebraicDecisionTree HybridNonlinearFactorGraph::errorTree( + const Values& values) const { + AlgebraicDecisionTree result(0.0); + + // Iterate over each factor. 
+ for (auto& factor : factors_) { + if (auto hnf = std::dynamic_pointer_cast(factor)) { + // Compute factor error and add it. + result = result + hnf->errorTree(values); + + } else if (auto nf = std::dynamic_pointer_cast(factor)) { + // If continuous only, get the (double) error + // and add it to every leaf of the result + result = result + nf->error(values); + + } else if (auto df = std::dynamic_pointer_cast(factor)) { + // If discrete, just add its errorTree as well + result = result + df->errorTree(); + + } else { + throw std::runtime_error( + "HybridNonlinearFactorGraph::errorTree(Values) not implemented for " + "factor type " + + demangle(typeid(factor).name()) + "."); + } + } + + return result; +} + } // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h index 5a09f18d4..53920a4aa 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.h +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h @@ -90,6 +90,19 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph { /// Expose error(const HybridValues&) method. using Base::error; + /** + * @brief Compute error of (hybrid) nonlinear factors and discrete factors + * over each discrete assignment, and return as a tree. + * + * Error \f$ e = \Vert f(x) - \mu \Vert_{\Sigma} \f$. + * + * @note: Gaussian and hybrid Gaussian factors are not considered! + * + * @param values Manifold values at which to compute the error. 
+ * @return AlgebraicDecisionTree + */ + AlgebraicDecisionTree errorTree(const Values& values) const; + /// @} }; From 2463245ec21aa77301a790b879245c0f35a75887 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 1 Oct 2024 14:38:51 -0400 Subject: [PATCH 397/398] add unit test --- .../tests/testHybridNonlinearFactorGraph.cpp | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 4735c1657..8221321ea 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -211,6 +211,24 @@ TEST(HybridNonlinearFactorGraph, PushBack) { // EXPECT_LONGS_EQUAL(3, hnfg.size()); } +/* ****************************************************************************/ +// Test hybrid nonlinear factor graph errorTree +TEST(HybridNonlinearFactorGraph, ErrorTree) { + Switching s(3); + + HybridNonlinearFactorGraph graph = s.nonlinearFactorGraph; + + auto error_tree = graph.errorTree(s.linearizationPoint); + + auto dkeys = graph.discreteKeys(); + std::vector discrete_keys(dkeys.begin(), dkeys.end()); + std::vector leaves = {152.79175946923, 151.59861228867, + 151.70397280433, 151.60943791243}; + AlgebraicDecisionTree expected_error(discrete_keys, leaves); + // regression + EXPECT(assert_equal(expected_error, error_tree, 1e-7)); +} + /**************************************************************************** * Test construction of switching-like hybrid factor graph. 
*/ From dc5c4cb99dbaf7361e70b33cfa4417886a00d40f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 1 Oct 2024 15:50:33 -0400 Subject: [PATCH 398/398] compute expected error --- .../tests/testHybridNonlinearFactorGraph.cpp | 21 +++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 8221321ea..647a8b646 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -217,15 +217,28 @@ TEST(HybridNonlinearFactorGraph, ErrorTree) { Switching s(3); HybridNonlinearFactorGraph graph = s.nonlinearFactorGraph; + Values values = s.linearizationPoint; auto error_tree = graph.errorTree(s.linearizationPoint); auto dkeys = graph.discreteKeys(); - std::vector discrete_keys(dkeys.begin(), dkeys.end()); - std::vector leaves = {152.79175946923, 151.59861228867, - 151.70397280433, 151.60943791243}; + DiscreteKeys discrete_keys(dkeys.begin(), dkeys.end()); + + // Compute the sum of errors for each factor. + auto assignments = DiscreteValues::CartesianProduct(discrete_keys); + std::vector leaves(assignments.size()); + for (auto &&factor : graph) { + for (size_t i = 0; i < assignments.size(); ++i) { + leaves[i] += + factor->error(HybridValues(VectorValues(), assignments[i], values)); + } + } + // Swap i=1 and i=2 to give correct ordering. + double temp = leaves[1]; + leaves[1] = leaves[2]; + leaves[2] = temp; AlgebraicDecisionTree expected_error(discrete_keys, leaves); - // regression + EXPECT(assert_equal(expected_error, error_tree, 1e-7)); }