From 77664288cffc3200ec8a43926efe0aa6eac80d5a Mon Sep 17 00:00:00 2001 From: Akshay Krishnan Date: Wed, 5 Oct 2022 16:34:28 -0400 Subject: [PATCH 01/41] Update geometry.i --- gtsam/geometry/geometry.i | 1 + 1 file changed, 1 insertion(+) diff --git a/gtsam/geometry/geometry.i b/gtsam/geometry/geometry.i index 988727b98..3919af496 100644 --- a/gtsam/geometry/geometry.i +++ b/gtsam/geometry/geometry.i @@ -329,6 +329,7 @@ class Rot3 { // Operator Overloads gtsam::Rot3 operator*(const gtsam::Rot3& p2) const; + gtsam::Rot3 operator*(const gtsam::Unit3& p) const; // Manifold // gtsam::Rot3 retractCayley(Vector v) const; // TODO, does not exist in both From e2f8ad8962de897d2c8dbeb68f0bb262b0b73f7e Mon Sep 17 00:00:00 2001 From: Akshay Krishnan Date: Wed, 5 Oct 2022 16:37:51 -0400 Subject: [PATCH 02/41] Update geometry.i --- gtsam/geometry/geometry.i | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/gtsam/geometry/geometry.i b/gtsam/geometry/geometry.i index 3919af496..5675265a4 100644 --- a/gtsam/geometry/geometry.i +++ b/gtsam/geometry/geometry.i @@ -329,7 +329,7 @@ class Rot3 { // Operator Overloads gtsam::Rot3 operator*(const gtsam::Rot3& p2) const; - gtsam::Rot3 operator*(const gtsam::Unit3& p) const; + gtsam::Unit3 operator*(const gtsam::Unit3& p) const; // Manifold // gtsam::Rot3 retractCayley(Vector v) const; // TODO, does not exist in both @@ -341,6 +341,10 @@ class Rot3 { gtsam::Point3 rotate(const gtsam::Point3& p) const; gtsam::Point3 unrotate(const gtsam::Point3& p) const; + // Group action on Unit3 + gtsam::Unit3 rotate(const gtsam::Unit3& p) const; + gtsam::Unit3 unrotate(const gtsam::Unit3& p) const; + // Standard Interface static gtsam::Rot3 Expmap(Vector v); static Vector Logmap(const gtsam::Rot3& p); From ece5640133ad01f419c43e641bd71d4fc8ca4c86 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 24 Dec 2022 20:05:45 +0530 Subject: [PATCH 03/41] fix wrapper warnings --- gtsam_unstable/gtsam_unstable.i | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/gtsam_unstable/gtsam_unstable.i b/gtsam_unstable/gtsam_unstable.i index c6dbd4ab6..6ce7be20a 100644 --- a/gtsam_unstable/gtsam_unstable.i +++ b/gtsam_unstable/gtsam_unstable.i @@ -21,9 +21,7 @@ virtual class gtsam::noiseModel::Isotropic; virtual class gtsam::imuBias::ConstantBias; virtual class gtsam::NonlinearFactor; virtual class gtsam::NoiseModelFactor; -virtual class gtsam::NoiseModelFactor2; -virtual class gtsam::NoiseModelFactor3; -virtual class gtsam::NoiseModelFactor4; +virtual class gtsam::NoiseModelFactorN; virtual class gtsam::GaussianFactor; virtual class gtsam::HessianFactor; virtual class gtsam::JacobianFactor; @@ -430,8 +428,9 @@ virtual class IMUFactor : gtsam::NoiseModelFactor { Vector gyro() const; Vector accel() const; Vector z() const; - size_t key1() const; - size_t key2() const; + + template + size_t key() const; }; #include @@ -448,8 +447,9 @@ virtual class FullIMUFactor : gtsam::NoiseModelFactor { Vector gyro() const; Vector accel() const; Vector z() const; - size_t key1() const; - size_t key2() const; + + template + size_t key() const; }; #include @@ -733,14 +733,14 @@ class AHRS { // Tectonic SAM Factors #include -//typedef gtsam::NoiseModelFactor2 NLPosePose; +//typedef gtsam::NoiseModelFactorN NLPosePose; virtual class DeltaFactor : gtsam::NoiseModelFactor { DeltaFactor(size_t i, size_t j, const gtsam::Point2& measured, const gtsam::noiseModel::Base* noiseModel); //void print(string s) const; }; -//typedef gtsam::NoiseModelFactor4 
NLPosePosePosePoint; virtual class DeltaFactorBase : gtsam::NoiseModelFactor { DeltaFactorBase(size_t b1, size_t i, size_t b2, size_t j, @@ -748,7 +748,7 @@ virtual class DeltaFactorBase : gtsam::NoiseModelFactor { //void print(string s) const; }; -//typedef gtsam::NoiseModelFactor4 NLPosePosePosePose; virtual class OdometryFactorBase : gtsam::NoiseModelFactor { OdometryFactorBase(size_t b1, size_t i, size_t b2, size_t j, From f2ab4afda488648ec1bbf18da41c3e15dcca8cc7 Mon Sep 17 00:00:00 2001 From: Akshay Krishnan Date: Sat, 24 Dec 2022 10:06:15 -0500 Subject: [PATCH 04/41] remove * overator --- gtsam/geometry/geometry.i | 1 - 1 file changed, 1 deletion(-) diff --git a/gtsam/geometry/geometry.i b/gtsam/geometry/geometry.i index 5675265a4..310ae4289 100644 --- a/gtsam/geometry/geometry.i +++ b/gtsam/geometry/geometry.i @@ -329,7 +329,6 @@ class Rot3 { // Operator Overloads gtsam::Rot3 operator*(const gtsam::Rot3& p2) const; - gtsam::Unit3 operator*(const gtsam::Unit3& p) const; // Manifold // gtsam::Rot3 retractCayley(Vector v) const; // TODO, does not exist in both From 6f5e85664c0b58771a1e49e792cc0b4ea2c445cf Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 24 Dec 2022 20:36:26 +0530 Subject: [PATCH 05/41] fix some more deprecation warnings --- gtsam/slam/StereoFactor.h | 2 +- gtsam_unstable/slam/LocalOrientedPlane3Factor.cpp | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/gtsam/slam/StereoFactor.h b/gtsam/slam/StereoFactor.h index 3be255e45..1d2ef501d 100644 --- a/gtsam/slam/StereoFactor.h +++ b/gtsam/slam/StereoFactor.h @@ -144,7 +144,7 @@ public: std::cout << e.what() << ": Landmark "<< DefaultKeyFormatter(this->template key<2>()) << " moved behind camera " << DefaultKeyFormatter(this->template key<1>()) << std::endl; if (throwCheirality_) - throw StereoCheiralityException(this->key2()); + throw StereoCheiralityException(this->template key<2>()); } return Vector3::Constant(2.0 * K_->fx()); } diff --git a/gtsam_unstable/slam/LocalOrientedPlane3Factor.cpp b/gtsam_unstable/slam/LocalOrientedPlane3Factor.cpp index 25d7083f8..3a8cd0c6c 100644 --- a/gtsam_unstable/slam/LocalOrientedPlane3Factor.cpp +++ b/gtsam_unstable/slam/LocalOrientedPlane3Factor.cpp @@ -15,8 +15,8 @@ namespace gtsam { void LocalOrientedPlane3Factor::print(const string& s, const KeyFormatter& keyFormatter) const { cout << s << (s == "" ? 
"" : "\n"); - cout << "LocalOrientedPlane3Factor Factor (" << keyFormatter(key1()) << ", " - << keyFormatter(key2()) << ", " << keyFormatter(key3()) << ")\n"; + cout << "LocalOrientedPlane3Factor Factor (" << keyFormatter(key<1>()) << ", " + << keyFormatter(key<2>()) << ", " << keyFormatter(key<3>()) << ")\n"; measured_p_.print("Measured Plane"); this->noiseModel_->print(" noise model: "); } From fd55e09bcc766310b5e2946b38619c69ffc42ed8 Mon Sep 17 00:00:00 2001 From: Akshay Krishnan Date: Sat, 24 Dec 2022 10:06:28 -0500 Subject: [PATCH 06/41] add rotate() test --- python/gtsam/tests/test_Rot3.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/python/gtsam/tests/test_Rot3.py b/python/gtsam/tests/test_Rot3.py index a1ce01ba2..5aa22bb0b 100644 --- a/python/gtsam/tests/test_Rot3.py +++ b/python/gtsam/tests/test_Rot3.py @@ -13,7 +13,7 @@ import unittest import numpy as np import gtsam -from gtsam import Rot3 +from gtsam import Point3, Rot3, Unit3 from gtsam.utils.test_case import GtsamTestCase @@ -2032,6 +2032,19 @@ class TestRot3(GtsamTestCase): angle_deg = np.rad2deg(angle) assert angle_deg < 180 + def test_rotate(self) -> None: + """Test that rotate() works for both Point3 and Unit3.""" + R = Rot3(np.array([[1, 0, 0], [0, -1, 0], [0, 0, 1]])) + p = Point3(1., 1., 1.) + u = Unit3(np.array([1, 1, 1])) + print(type(p)) + actual_p = R.rotate(p) + actual_u = R.rotate(u) + expected_p = Point3(np.array([1, -1, 1])) + expected_u = Unit3(np.array([1, -1, 1])) + np.testing.assert_array_equal(actual_p, expected_p) + np.testing.assert_array_equal(actual_u.point3(), expected_u.point3()) + if __name__ == "__main__": unittest.main() From 3a2816e4fcaac539b9a092632e9b96317ddc27b1 Mon Sep 17 00:00:00 2001 From: Akshay Krishnan Date: Sat, 24 Dec 2022 10:15:08 -0500 Subject: [PATCH 07/41] add unrotate() test --- python/gtsam/tests/test_Rot3.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/python/gtsam/tests/test_Rot3.py b/python/gtsam/tests/test_Rot3.py index 5aa22bb0b..e1eeb7fe4 100644 --- a/python/gtsam/tests/test_Rot3.py +++ b/python/gtsam/tests/test_Rot3.py @@ -2037,7 +2037,6 @@ class TestRot3(GtsamTestCase): R = Rot3(np.array([[1, 0, 0], [0, -1, 0], [0, 0, 1]])) p = Point3(1., 1., 1.) u = Unit3(np.array([1, 1, 1])) - print(type(p)) actual_p = R.rotate(p) actual_u = R.rotate(u) expected_p = Point3(np.array([1, -1, 1])) @@ -2045,6 +2044,19 @@ class TestRot3(GtsamTestCase): np.testing.assert_array_equal(actual_p, expected_p) np.testing.assert_array_equal(actual_u.point3(), expected_u.point3()) + def test_unrotate(self) -> None: + """Test that unrotate() after rotate() yields original Point3/Unit3.""" + wRc = Rot3(np.array(R1_R2_pairs[0][0])) + c_p = Point3(1., 1., 1.) 
+ c_u = Unit3(np.array([1, 1, 1])) + w_p = wRc.rotate(c_p) + w_u = wRc.rotate(c_u) + actual_p = wRc.unrotate(w_p) + actual_u = wRc.unrotate(w_u) + + np.testing.assert_almost_equal(actual_p, c_p, decimal=6) + np.testing.assert_almost_equal(actual_u.point3(), c_u.point3(), decimal=6) + if __name__ == "__main__": unittest.main() From 41a96473b577d1b08bbbc89f4ca26deaf20a856a Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Dec 2022 23:53:22 -0500 Subject: [PATCH 08/41] Pass reference --- gtsam/hybrid/HybridValues.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridValues.h b/gtsam/hybrid/HybridValues.h index 4928f9384..944fe17e6 100644 --- a/gtsam/hybrid/HybridValues.h +++ b/gtsam/hybrid/HybridValues.h @@ -78,10 +78,10 @@ class GTSAM_EXPORT HybridValues { /// @{ /// Return the discrete MPE assignment - DiscreteValues discrete() const { return discrete_; } + const DiscreteValues& discrete() const { return discrete_; } /// Return the delta update for the continuous vectors - VectorValues continuous() const { return continuous_; } + const VectorValues& continuous() const { return continuous_; } /// Check whether a variable with key \c j exists in DiscreteValue. bool existsDiscrete(Key j) { return (discrete_.find(j) != discrete_.end()); }; From b04f2f8582426857024ed54159cb55ace7739669 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 28 Dec 2022 08:18:00 -0500 Subject: [PATCH 09/41] HBN::evaluate # Conflicts: # gtsam/hybrid/HybridBayesNet.cpp # gtsam/hybrid/tests/testHybridBayesNet.cpp --- gtsam/hybrid/HybridBayesNet.cpp | 44 ++++++++++-- gtsam/hybrid/HybridBayesNet.h | 8 +++ gtsam/hybrid/tests/testHybridBayesNet.cpp | 83 ++++++++++++++++++----- 3 files changed, 113 insertions(+), 22 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 7a46d7832..12b56ece8 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -150,9 +150,7 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) { // Go through all the conditionals in the // Bayes Net and prune them as per decisionTree. - for (size_t i = 0; i < this->size(); i++) { - HybridConditional::shared_ptr conditional = this->at(i); - + for (auto &&conditional : *this) { if (conditional->isHybrid()) { GaussianMixture::shared_ptr gaussianMixture = conditional->asMixture(); @@ -225,16 +223,50 @@ HybridValues HybridBayesNet::optimize() const { DiscreteValues mpe = DiscreteFactorGraph(discrete_bn).optimize(); // Given the MPE, compute the optimal continuous values. - GaussianBayesNet gbn = this->choose(mpe); + GaussianBayesNet gbn = choose(mpe); return HybridValues(mpe, gbn.optimize()); } /* ************************************************************************* */ VectorValues HybridBayesNet::optimize(const DiscreteValues &assignment) const { - GaussianBayesNet gbn = this->choose(assignment); + GaussianBayesNet gbn = choose(assignment); return gbn.optimize(); } +/* ************************************************************************* */ +double HybridBayesNet::evaluate(const HybridValues &values) const { + const DiscreteValues& discreteValues = values.discrete(); + const VectorValues& continuosValues = values.continuous(); + + double probability = 1.0; + + // Iterate over each conditional. + for (auto &&conditional : *this) { + if (conditional->isHybrid()) { + // If conditional is hybrid, select based on assignment and compute error. 
+ // GaussianMixture::shared_ptr gm = conditional->asMixture(); + // AlgebraicDecisionTree conditional_error = + // gm->error(continuousValues); + + // error_tree = error_tree + conditional_error; + + } else if (conditional->isContinuous()) { + // If continuous only, get the (double) error + // and add it to the error_tree + // double error = conditional->asGaussian()->error(continuousValues); + // // Add the computed error to every leaf of the error tree. + // error_tree = error_tree.apply( + // [error](double leaf_value) { return leaf_value + error; }); + } else if (conditional->isDiscrete()) { + // Conditional is discrete-only, we skip. + probability *= + conditional->asDiscreteConditional()->operator()(discreteValues); + } + } + + return probability; +} + /* ************************************************************************* */ HybridValues HybridBayesNet::sample(const HybridValues &given, std::mt19937_64 *rng) const { @@ -273,7 +305,7 @@ HybridValues HybridBayesNet::sample() const { /* ************************************************************************* */ double HybridBayesNet::error(const VectorValues &continuousValues, const DiscreteValues &discreteValues) const { - GaussianBayesNet gbn = this->choose(discreteValues); + GaussianBayesNet gbn = choose(discreteValues); return gbn.error(continuousValues); } diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 1e6bebf1a..ff13ca1b7 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -95,6 +95,14 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { */ GaussianBayesNet choose(const DiscreteValues &assignment) const; + //** evaluate for given HybridValues */ + double evaluate(const HybridValues &values) const; + + //** (Preferred) sugar for the above for given DiscreteValues */ + double operator()(const HybridValues &values) const { + return evaluate(values); + } + /** * @brief Solve the HybridBayesNet by first computing the MPE of all the * discrete variables and then optimizing the continuous variables based on diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 3e3fab376..8f9632ba3 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -36,36 +36,87 @@ using noiseModel::Isotropic; using symbol_shorthand::M; using symbol_shorthand::X; -static const DiscreteKey Asia(0, 2); +static const Key asiaKey = 0; +static const DiscreteKey Asia(asiaKey, 2); /* ****************************************************************************/ -// Test creation +// Test creation of a pure discrete Bayes net. TEST(HybridBayesNet, Creation) { HybridBayesNet bayesNet; - bayesNet.add(Asia, "99/1"); DiscreteConditional expected(Asia, "99/1"); - CHECK(bayesNet.atDiscrete(0)); - auto& df = *bayesNet.atDiscrete(0); - EXPECT(df.equals(expected)); + EXPECT(assert_equal(expected, *bayesNet.atDiscrete(0))); } /* ****************************************************************************/ -// Test adding a bayes net to another one. +// Test adding a Bayes net to another one. TEST(HybridBayesNet, Add) { HybridBayesNet bayesNet; - bayesNet.add(Asia, "99/1"); - DiscreteConditional expected(Asia, "99/1"); - HybridBayesNet other; other.push_back(bayesNet); EXPECT(bayesNet.equals(other)); } +/* ****************************************************************************/ +// Test evaluate for a pure discrete Bayes net P(Asia). 
+TEST(HybridBayesNet, evaluatePureDiscrete) { + HybridBayesNet bayesNet; + bayesNet.add(Asia, "99/1"); + HybridValues values; + values.insert(asiaKey, 0); + EXPECT_DOUBLES_EQUAL(0.99, bayesNet.evaluate(values), 1e-9); +} + +/* ****************************************************************************/ +// Test evaluate for a hybrid Bayes net P(X1|Asia) P(Asia). +TEST(HybridBayesNet, evaluateHybrid) { + HybridBayesNet bayesNet; + + SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector2(1.0, 0.34)); + + const Vector2 d0(1, 2); + Matrix22 R0 = Matrix22::Ones(); + const auto conditional0 = + boost::make_shared(X(1), d0, R0, model); + + const Vector2 d1(2, 1); + Matrix22 R1 = Matrix22::Ones(); + const auto conditional1 = + boost::make_shared(X(1), d1, R1, model); + + // TODO(dellaert): creating and adding mixture is clumsy. + std::vector conditionals{conditional0, + conditional1}; + const auto mixture = + GaussianMixture::FromConditionals({X(1)}, {}, {Asia}, conditionals); + bayesNet.push_back( + HybridConditional(boost::make_shared(mixture))); + + // Add component probabilities. + bayesNet.add(Asia, "99/1"); + + // Create values at which to evaluate. + HybridValues values; + values.insert(asiaKey, 0); + values.insert(X(1), Vector2(1, 2)); + + // TODO(dellaert): we need real probabilities! + const double conditionalProbability = + conditional0->error(values.continuous()); + EXPECT_DOUBLES_EQUAL(conditionalProbability * 0.99, bayesNet.evaluate(values), 1e-9); +} + + // const Vector2 d1(2, 1); + // Matrix22 R1 = Matrix22::Ones(); + // Matrix22 S1 = Matrix22::Identity() * 2; + // const auto conditional1 = + // boost::make_shared(X(1), d1, R1, X(2), S1, model); + + /* ****************************************************************************/ // Test choosing an assignment of conditionals TEST(HybridBayesNet, Choose) { @@ -105,7 +156,7 @@ TEST(HybridBayesNet, Choose) { } /* ****************************************************************************/ -// Test bayes net optimize +// Test Bayes net optimize TEST(HybridBayesNet, OptimizeAssignment) { Switching s(4); @@ -139,7 +190,7 @@ TEST(HybridBayesNet, OptimizeAssignment) { } /* ****************************************************************************/ -// Test bayes net optimize +// Test Bayes net optimize TEST(HybridBayesNet, Optimize) { Switching s(4); @@ -203,7 +254,7 @@ TEST(HybridBayesNet, Error) { // regression EXPECT(assert_equal(expected_error, error_tree, 1e-9)); - // Error on pruned bayes net + // Error on pruned Bayes net auto prunedBayesNet = hybridBayesNet->prune(2); auto pruned_error_tree = prunedBayesNet.error(delta.continuous()); @@ -238,7 +289,7 @@ TEST(HybridBayesNet, Error) { } /* ****************************************************************************/ -// Test bayes net pruning +// Test Bayes net pruning TEST(HybridBayesNet, Prune) { Switching s(4); @@ -256,7 +307,7 @@ TEST(HybridBayesNet, Prune) { } /* ****************************************************************************/ -// Test bayes net updateDiscreteConditionals +// Test Bayes net updateDiscreteConditionals TEST(HybridBayesNet, UpdateDiscreteConditionals) { Switching s(4); @@ -358,7 +409,7 @@ TEST(HybridBayesNet, Sampling) { // Sample HybridValues sample = bn->sample(&gen); - discrete_samples.push_back(sample.discrete()[M(0)]); + discrete_samples.push_back(sample.discrete().at(M(0))); if (i == 0) { average_continuous.insert(sample.continuous()); From 8d4dc3d880776382b7ce1a4b086a58a658bbd4bf Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 
28 Dec 2022 10:19:30 -0500 Subject: [PATCH 10/41] GBN::evaluate prototype code works --- gtsam/linear/tests/testGaussianBayesNet.cpp | 65 ++++++++++++++++++++- 1 file changed, 64 insertions(+), 1 deletion(-) diff --git a/gtsam/linear/tests/testGaussianBayesNet.cpp b/gtsam/linear/tests/testGaussianBayesNet.cpp index 2b125265f..b8534a02b 100644 --- a/gtsam/linear/tests/testGaussianBayesNet.cpp +++ b/gtsam/linear/tests/testGaussianBayesNet.cpp @@ -1,6 +1,6 @@ /* ---------------------------------------------------------------------------- - * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * GTSAM Copyright 2010-2022, Georgia Tech Research Corporation, * Atlanta, Georgia 30332-0415 * All Rights Reserved * Authors: Frank Dellaert, et al. (see THANKS for the full author list) @@ -67,6 +67,69 @@ TEST( GaussianBayesNet, Matrix ) EXPECT(assert_equal(d,d1)); } +/* ************************************************************************* */ +/** + * Calculate log-density for given values `x`: + * -0.5*(error + n*log(2*pi) + log det(Sigma)) + * where x is the vector of values, and Sigma is the covariance matrix. + */ +double logDensity(const GaussianConditional::shared_ptr& gc, + const VectorValues& x) { + constexpr double log2pi = 1.8378770664093454835606594728112; + size_t n = gc->d().size(); + // log det(Sigma)) = - 2 * gc->logDeterminant() + double sum = gc->error(x) + n * log2pi - 2 * gc->logDeterminant(); + return -0.5 * sum; +} + +/** + * Calculate probability density for given values `x`: + * exp(-0.5*error(x)) / sqrt((2*pi)^n*det(Sigma)) + * where x is the vector of values, and Sigma is the covariance matrix. + */ +double evaluate(const GaussianConditional::shared_ptr& gc, + const VectorValues& x) { + return exp(logDensity(gc, x)); +} + +/** Calculate probability for given values `x` */ +double evaluate(const GaussianBayesNet& gbn, const VectorValues& x) { + double density = 1.0; + for (const auto& conditional : gbn) { + if (conditional) density *= evaluate(conditional, x); + } + return density; +} + +// Check that the evaluate function matches direct calculation with R. +TEST(GaussianBayesNet, Evaluate1) { + // Let's evaluate at the mean + const VectorValues mean = smallBayesNet.optimize(); + + // We get the matrix, which has noise model applied! + const Matrix R = smallBayesNet.matrix().first; + const Matrix invSigma = R.transpose() * R; + + // The Bayes net is a Gaussian density ~ exp (-0.5*(Rx-d)'*(Rx-d)) + // which at the mean is 1.0! So, the only thing we need to calculate is + // the normalization constant 1.0/sqrt((2*pi*Sigma).det()). + // The covariance matrix inv(Sigma) = R'*R, so the determinant is + const double expected = sqrt((invSigma / (2 * M_PI)).determinant()); + const double actual = evaluate(smallBayesNet, mean); + EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9); +} + +// Check the evaluate with non-unit noise. +TEST(GaussianBayesNet, Evaluate2) { + // See comments in test above. 
+ const VectorValues mean = noisyBayesNet.optimize(); + const Matrix R = noisyBayesNet.matrix().first; + const Matrix invSigma = R.transpose() * R; + const double expected = sqrt((invSigma / (2 * M_PI)).determinant()); + const double actual = evaluate(noisyBayesNet, mean); + EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9); +} + /* ************************************************************************* */ TEST( GaussianBayesNet, NoisyMatrix ) { From 1d3a7d47530f70d1ad61f141ee93f1f85ddb7ad2 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 28 Dec 2022 10:59:01 -0500 Subject: [PATCH 11/41] Added logDensity and evaluate to Gaussian conditional and Bayes net --- gtsam/linear/GaussianBayesNet.cpp | 14 +++++++ gtsam/linear/GaussianBayesNet.h | 14 +++++++ gtsam/linear/GaussianConditional.cpp | 14 +++++++ gtsam/linear/GaussianConditional.h | 21 +++++++--- gtsam/linear/tests/testGaussianBayesNet.cpp | 37 +----------------- .../linear/tests/testGaussianConditional.cpp | 38 +++++++++++++++++++ 6 files changed, 98 insertions(+), 40 deletions(-) diff --git a/gtsam/linear/GaussianBayesNet.cpp b/gtsam/linear/GaussianBayesNet.cpp index 229d4a932..4c1338435 100644 --- a/gtsam/linear/GaussianBayesNet.cpp +++ b/gtsam/linear/GaussianBayesNet.cpp @@ -224,5 +224,19 @@ namespace gtsam { } /* ************************************************************************* */ + double GaussianBayesNet::logDensity(const VectorValues& x) const { + double sum = 0.0; + for (const auto& conditional : *this) { + if (conditional) sum += conditional->logDensity(x); + } + return sum; + } + + /* ************************************************************************* */ + double GaussianBayesNet::evaluate(const VectorValues& x) const { + return exp(logDensity(x)); + } + + /* ************************************************************************* */ } // namespace gtsam diff --git a/gtsam/linear/GaussianBayesNet.h b/gtsam/linear/GaussianBayesNet.h index e81d6af33..381b8ad3a 100644 --- a/gtsam/linear/GaussianBayesNet.h +++ b/gtsam/linear/GaussianBayesNet.h @@ -88,6 +88,20 @@ namespace gtsam { /// @name Standard Interface /// @{ + /** + * Calculate log-density for given values `x`: + * -0.5*(error + n*log(2*pi) + log det(Sigma)) + * where x is the vector of values, and Sigma is the covariance matrix. + */ + double logDensity(const VectorValues& x) const; + + /** + * Calculate probability density for given values `x`: + * exp(-0.5*error(x)) / sqrt((2*pi)^n*det(Sigma)) + * where x is the vector of values, and Sigma is the covariance matrix. + */ + double evaluate(const VectorValues& x) const; + /// Solve the GaussianBayesNet, i.e. 
return \f$ x = R^{-1}*d \f$, by /// back-substitution VectorValues optimize() const; diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index 4597156bc..ec895b8e7 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -169,6 +169,20 @@ double GaussianConditional::logDeterminant() const { return logDet; } + /* ************************************************************************* */ +double GaussianConditional::logDensity(const VectorValues& x) const { + constexpr double log2pi = 1.8378770664093454835606594728112; + size_t n = d().size(); + // log det(Sigma)) = - 2 * logDeterminant() + double sum = error(x) + n * log2pi - 2 * logDeterminant(); + return -0.5 * sum; +} + +/* ************************************************************************* */ +double GaussianConditional::evaluate(const VectorValues& x) const { + return exp(logDensity(x)); +} + /* ************************************************************************* */ VectorValues GaussianConditional::solve(const VectorValues& x) const { // Concatenate all vector values that correspond to parent variables diff --git a/gtsam/linear/GaussianConditional.h b/gtsam/linear/GaussianConditional.h index 8af7f6602..cf8cada82 100644 --- a/gtsam/linear/GaussianConditional.h +++ b/gtsam/linear/GaussianConditional.h @@ -121,6 +121,20 @@ namespace gtsam { /// @name Standard Interface /// @{ + /** + * Calculate log-density for given values `x`: + * -0.5*(error + n*log(2*pi) + log det(Sigma)) + * where x is the vector of values, and Sigma is the covariance matrix. + */ + double logDensity(const VectorValues& x) const; + + /** + * Calculate probability density for given values `x`: + * exp(-0.5*error(x)) / sqrt((2*pi)^n*det(Sigma)) + * where x is the vector of values, and Sigma is the covariance matrix. + */ + double evaluate(const VectorValues& x) const; + /** Return a view of the upper-triangular R block of the conditional */ constABlock R() const { return Ab_.range(0, nrFrontals()); } @@ -134,9 +148,7 @@ namespace gtsam { const constBVector d() const { return BaseFactor::getb(); } /** - * @brief Compute the log determinant of the Gaussian conditional. - * The determinant is computed using the R matrix, which is upper - * triangular. + * @brief Compute the log determinant of the R matrix. * For numerical stability, the determinant is computed in log * form, so it is a summation rather than a multiplication. * @@ -145,8 +157,7 @@ namespace gtsam { double logDeterminant() const; /** - * @brief Compute the determinant of the conditional from the - * upper-triangular R matrix. + * @brief Compute the determinant of the R matrix. * * The determinant is computed in log form (hence summation) for numerical * stability and then exponentiated. diff --git a/gtsam/linear/tests/testGaussianBayesNet.cpp b/gtsam/linear/tests/testGaussianBayesNet.cpp index b8534a02b..e5e8840c0 100644 --- a/gtsam/linear/tests/testGaussianBayesNet.cpp +++ b/gtsam/linear/tests/testGaussianBayesNet.cpp @@ -68,39 +68,6 @@ TEST( GaussianBayesNet, Matrix ) } /* ************************************************************************* */ -/** - * Calculate log-density for given values `x`: - * -0.5*(error + n*log(2*pi) + log det(Sigma)) - * where x is the vector of values, and Sigma is the covariance matrix. 
- */ -double logDensity(const GaussianConditional::shared_ptr& gc, - const VectorValues& x) { - constexpr double log2pi = 1.8378770664093454835606594728112; - size_t n = gc->d().size(); - // log det(Sigma)) = - 2 * gc->logDeterminant() - double sum = gc->error(x) + n * log2pi - 2 * gc->logDeterminant(); - return -0.5 * sum; -} - -/** - * Calculate probability density for given values `x`: - * exp(-0.5*error(x)) / sqrt((2*pi)^n*det(Sigma)) - * where x is the vector of values, and Sigma is the covariance matrix. - */ -double evaluate(const GaussianConditional::shared_ptr& gc, - const VectorValues& x) { - return exp(logDensity(gc, x)); -} - -/** Calculate probability for given values `x` */ -double evaluate(const GaussianBayesNet& gbn, const VectorValues& x) { - double density = 1.0; - for (const auto& conditional : gbn) { - if (conditional) density *= evaluate(conditional, x); - } - return density; -} - // Check that the evaluate function matches direct calculation with R. TEST(GaussianBayesNet, Evaluate1) { // Let's evaluate at the mean @@ -115,7 +82,7 @@ TEST(GaussianBayesNet, Evaluate1) { // the normalization constant 1.0/sqrt((2*pi*Sigma).det()). // The covariance matrix inv(Sigma) = R'*R, so the determinant is const double expected = sqrt((invSigma / (2 * M_PI)).determinant()); - const double actual = evaluate(smallBayesNet, mean); + const double actual = smallBayesNet.evaluate(mean); EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9); } @@ -126,7 +93,7 @@ TEST(GaussianBayesNet, Evaluate2) { const Matrix R = noisyBayesNet.matrix().first; const Matrix invSigma = R.transpose() * R; const double expected = sqrt((invSigma / (2 * M_PI)).determinant()); - const double actual = evaluate(noisyBayesNet, mean); + const double actual = noisyBayesNet.evaluate(mean); EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9); } diff --git a/gtsam/linear/tests/testGaussianConditional.cpp b/gtsam/linear/tests/testGaussianConditional.cpp index 6ec06a0ce..a3b64528b 100644 --- a/gtsam/linear/tests/testGaussianConditional.cpp +++ b/gtsam/linear/tests/testGaussianConditional.cpp @@ -130,6 +130,44 @@ TEST( GaussianConditional, equals ) EXPECT( expected.equals(actual) ); } +namespace density { +static const Key key = 77; +static const auto unitPrior = + GaussianConditional(key, Vector1::Constant(5), I_1x1), + widerPrior = + GaussianConditional(key, Vector1::Constant(5), I_1x1, + noiseModel::Isotropic::Sigma(1, 3.0)); +} // namespace density + +/* ************************************************************************* */ +// Check that the evaluate function matches direct calculation with R. +TEST(GaussianConditional, Evaluate1) { + // Let's evaluate at the mean + const VectorValues mean = density::unitPrior.solve(VectorValues()); + + // We get the Hessian matrix, which has noise model applied! + const Matrix invSigma = density::unitPrior.information(); + + // A Gaussian density ~ exp (-0.5*(Rx-d)'*(Rx-d)) + // which at the mean is 1.0! So, the only thing we need to calculate is + // the normalization constant 1.0/sqrt((2*pi*Sigma).det()). + // The covariance matrix inv(Sigma) = R'*R, so the determinant is + const double expected = sqrt((invSigma / (2 * M_PI)).determinant()); + const double actual = density::unitPrior.evaluate(mean); + EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9); +} + +// Check the evaluate with non-unit noise. +TEST(GaussianConditional, Evaluate2) { + // See comments in test above. 
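+  // With sigma = 3, the normalization constant 1/sqrt(2*pi*sigma^2) is about 0.133.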
+ const VectorValues mean = density::widerPrior.solve(VectorValues()); + const Matrix R = density::widerPrior.R(); + const Matrix invSigma = density::widerPrior.information(); + const double expected = sqrt((invSigma / (2 * M_PI)).determinant()); + const double actual = density::widerPrior.evaluate(mean); + EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9); +} + /* ************************************************************************* */ TEST( GaussianConditional, solve ) { From 8391c783bf24e862ebf15992bd65b9bf63435ff3 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 28 Dec 2022 12:00:37 -0500 Subject: [PATCH 12/41] Correct densities (error already includes 0.5!) --- gtsam/linear/GaussianBayesNet.h | 22 +++++--- gtsam/linear/GaussianConditional.cpp | 9 ++-- gtsam/linear/GaussianConditional.h | 51 ++++++++++++------- .../linear/tests/testGaussianConditional.cpp | 37 ++++++++++++-- 4 files changed, 85 insertions(+), 34 deletions(-) diff --git a/gtsam/linear/GaussianBayesNet.h b/gtsam/linear/GaussianBayesNet.h index 381b8ad3a..426d3bd83 100644 --- a/gtsam/linear/GaussianBayesNet.h +++ b/gtsam/linear/GaussianBayesNet.h @@ -88,20 +88,26 @@ namespace gtsam { /// @name Standard Interface /// @{ + /** + * Calculate probability density for given values `x`: + * exp(-error(x)) / sqrt((2*pi)^n*det(Sigma)) + * where x is the vector of values, and Sigma is the covariance matrix. + * Note that error(x)=0.5*e'*e includes the 0.5 factor already. + */ + double evaluate(const VectorValues& x) const; + + /// Evaluate probability density, sugar. + double operator()(const VectorValues& x) const { + return evaluate(x); + } + /** * Calculate log-density for given values `x`: - * -0.5*(error + n*log(2*pi) + log det(Sigma)) + * -error(x) - 0.5 * n*log(2*pi) - 0.5 * log det(Sigma) * where x is the vector of values, and Sigma is the covariance matrix. */ double logDensity(const VectorValues& x) const; - /** - * Calculate probability density for given values `x`: - * exp(-0.5*error(x)) / sqrt((2*pi)^n*det(Sigma)) - * where x is the vector of values, and Sigma is the covariance matrix. - */ - double evaluate(const VectorValues& x) const; - /// Solve the GaussianBayesNet, i.e. 
return \f$ x = R^{-1}*d \f$, by /// back-substitution VectorValues optimize() const; diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index ec895b8e7..5e8a193cf 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -169,13 +169,14 @@ double GaussianConditional::logDeterminant() const { return logDet; } - /* ************************************************************************* */ +/* ************************************************************************* */ +// density = exp(-error(x)) / sqrt((2*pi)^n*det(Sigma)) +// log = -error(x) - 0.5 * n*log(2*pi) - 0.5 * log det(Sigma) double GaussianConditional::logDensity(const VectorValues& x) const { constexpr double log2pi = 1.8378770664093454835606594728112; size_t n = d().size(); - // log det(Sigma)) = - 2 * logDeterminant() - double sum = error(x) + n * log2pi - 2 * logDeterminant(); - return -0.5 * sum; + // log det(Sigma)) = - 2.0 * logDeterminant() + return - error(x) - 0.5 * n * log2pi + logDeterminant(); } /* ************************************************************************* */ diff --git a/gtsam/linear/GaussianConditional.h b/gtsam/linear/GaussianConditional.h index cf8cada82..a72a73973 100644 --- a/gtsam/linear/GaussianConditional.h +++ b/gtsam/linear/GaussianConditional.h @@ -121,20 +121,26 @@ namespace gtsam { /// @name Standard Interface /// @{ + /** + * Calculate probability density for given values `x`: + * exp(-error(x)) / sqrt((2*pi)^n*det(Sigma)) + * where x is the vector of values, and Sigma is the covariance matrix. + * Note that error(x)=0.5*e'*e includes the 0.5 factor already. + */ + double evaluate(const VectorValues& x) const; + + /// Evaluate probability density, sugar. + double operator()(const VectorValues& x) const { + return evaluate(x); + } + /** * Calculate log-density for given values `x`: - * -0.5*(error + n*log(2*pi) + log det(Sigma)) + * -error(x) - 0.5 * n*log(2*pi) - 0.5 * log det(Sigma) * where x is the vector of values, and Sigma is the covariance matrix. */ double logDensity(const VectorValues& x) const; - /** - * Calculate probability density for given values `x`: - * exp(-0.5*error(x)) / sqrt((2*pi)^n*det(Sigma)) - * where x is the vector of values, and Sigma is the covariance matrix. - */ - double evaluate(const VectorValues& x) const; - /** Return a view of the upper-triangular R block of the conditional */ constABlock R() const { return Ab_.range(0, nrFrontals()); } @@ -147,25 +153,32 @@ namespace gtsam { /** Get a view of the r.h.s. vector d */ const constBVector d() const { return BaseFactor::getb(); } - /** - * @brief Compute the log determinant of the R matrix. - * For numerical stability, the determinant is computed in log - * form, so it is a summation rather than a multiplication. - * - * @return double - */ - double logDeterminant() const; - /** * @brief Compute the determinant of the R matrix. * - * The determinant is computed in log form (hence summation) for numerical - * stability and then exponentiated. + * The determinant is computed in log form using logDeterminant for + * numerical stability and then exponentiated. + * + * Note, the covariance matrix \f$ \Sigma = (R^T R)^{-1} \f$, and hence + * \f$ \det(\Sigma) = 1 / \det(R^T R) = 1 / determinant()^ 2 \f$. * * @return double */ double determinant() const { return exp(this->logDeterminant()); } + /** + * @brief Compute the log determinant of the R matrix. 
+ * + * For numerical stability, the determinant is computed in log + * form, so it is a summation rather than a multiplication. + * + * Note, the covariance matrix \f$ \Sigma = (R^T R)^{-1} \f$, and hence + * \f$ \log \det(\Sigma) = - \log \det(R^T R) = - 2 logDeterminant() \f$. + * + * @return double + */ + double logDeterminant() const; + /** * Solves a conditional Gaussian and writes the solution into the entries of * \c x for each frontal variable of the conditional. The parents are diff --git a/gtsam/linear/tests/testGaussianConditional.cpp b/gtsam/linear/tests/testGaussianConditional.cpp index a3b64528b..20d730856 100644 --- a/gtsam/linear/tests/testGaussianConditional.cpp +++ b/gtsam/linear/tests/testGaussianConditional.cpp @@ -130,13 +130,15 @@ TEST( GaussianConditional, equals ) EXPECT( expected.equals(actual) ); } +/* ************************************************************************* */ namespace density { static const Key key = 77; +static constexpr double sigma = 3.0; static const auto unitPrior = GaussianConditional(key, Vector1::Constant(5), I_1x1), - widerPrior = - GaussianConditional(key, Vector1::Constant(5), I_1x1, - noiseModel::Isotropic::Sigma(1, 3.0)); + widerPrior = GaussianConditional( + key, Vector1::Constant(5), I_1x1, + noiseModel::Isotropic::Sigma(1, sigma)); } // namespace density /* ************************************************************************* */ @@ -155,8 +157,23 @@ TEST(GaussianConditional, Evaluate1) { const double expected = sqrt((invSigma / (2 * M_PI)).determinant()); const double actual = density::unitPrior.evaluate(mean); EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9); + + using density::key; + using density::sigma; + + // Let's numerically integrate and see that we integrate to 1.0. + double integral = 0.0; + // Loop from -5*sigma to 5*sigma in 0.1*sigma steps: + for (double x = -5.0 * sigma; x <= 5.0 * sigma; x += 0.1 * sigma) { + VectorValues xValues; + xValues.insert(key, mean.at(key) + Vector1(x)); + const double density = density::unitPrior.evaluate(xValues); + integral += 0.1 * sigma * density; + } + EXPECT_DOUBLES_EQUAL(1.0, integral, 1e-9); } +/* ************************************************************************* */ // Check the evaluate with non-unit noise. TEST(GaussianConditional, Evaluate2) { // See comments in test above. @@ -166,6 +183,20 @@ TEST(GaussianConditional, Evaluate2) { const double expected = sqrt((invSigma / (2 * M_PI)).determinant()); const double actual = density::widerPrior.evaluate(mean); EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9); + + using density::key; + using density::sigma; + + // Let's numerically integrate and see that we integrate to 1.0. 
+ double integral = 0.0; + // Loop from -5*sigma to 5*sigma in 0.1*sigma steps: + for (double x = -5.0 * sigma; x <= 5.0 * sigma; x += 0.1 * sigma) { + VectorValues xValues; + xValues.insert(key, mean.at(key) + Vector1(x)); + const double density = density::widerPrior.evaluate(xValues); + integral += 0.1 * sigma * density; + } + EXPECT_DOUBLES_EQUAL(1.0, integral, 1e-5); } /* ************************************************************************* */ From b3b635cd94ee22f15db97eeb957a74b5e3c3f602 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 28 Dec 2022 12:31:13 -0500 Subject: [PATCH 13/41] Test sampling using Monte Carlo integration --- gtsam/linear/tests/testGaussianBayesNet.cpp | 35 ++++++++++++++++----- 1 file changed, 28 insertions(+), 7 deletions(-) diff --git a/gtsam/linear/tests/testGaussianBayesNet.cpp b/gtsam/linear/tests/testGaussianBayesNet.cpp index e5e8840c0..15f0d098a 100644 --- a/gtsam/linear/tests/testGaussianBayesNet.cpp +++ b/gtsam/linear/tests/testGaussianBayesNet.cpp @@ -172,14 +172,18 @@ TEST( GaussianBayesNet, optimize3 ) } /* ************************************************************************* */ -TEST(GaussianBayesNet, sample) { - GaussianBayesNet gbn; - Matrix A1 = (Matrix(2, 2) << 1., 2., 3., 4.).finished(); - const Vector2 mean(20, 40), b(10, 10); - const double sigma = 0.01; +namespace sampling { +static Matrix A1 = (Matrix(2, 2) << 1., 2., 3., 4.).finished(); +static const Vector2 mean(20, 40), b(10, 10); +static const double sigma = 0.01; +static const GaussianBayesNet gbn = + list_of(GaussianConditional::FromMeanAndStddev(X(0), A1, X(1), b, sigma))( + GaussianDensity::FromMeanAndStddev(X(1), mean, sigma)); +} // namespace sampling - gbn.add(GaussianConditional::FromMeanAndStddev(X(0), A1, X(1), b, sigma)); - gbn.add(GaussianDensity::FromMeanAndStddev(X(1), mean, sigma)); +/* ************************************************************************* */ +TEST(GaussianBayesNet, sample) { + using namespace sampling; auto actual = gbn.sample(); EXPECT_LONGS_EQUAL(2, actual.size()); @@ -195,6 +199,23 @@ TEST(GaussianBayesNet, sample) { // EXPECT(assert_equal(Vector2(110.032083, 230.039811), actual[X(0)], 1e-5)); } +/* ************************************************************************* */ +// Do Monte Carlo integration of square deviation, should be equal to 9.0. 
+TEST(GaussianBayesNet, MonteCarloIntegration) { + GaussianBayesNet gbn; + gbn.push_back(noisyBayesNet.at(1)); + + double sum = 0.0; + constexpr size_t N = 500; + // loop for N samples: + for (size_t i = 0; i < N; i++) { + const auto X_i = gbn.sample(); + sum += pow(X_i[_y_].x() - 5.0, 2.0); + } + // Expected is variance = 3*3 + EXPECT_DOUBLES_EQUAL(9.0, sum / N, 0.1); +} + /* ************************************************************************* */ TEST(GaussianBayesNet, ordering) { From 911e46b0a0fa9cdb46cef3b2b1c4241888828c91 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 28 Dec 2022 12:55:38 -0500 Subject: [PATCH 14/41] Evaluate for hybrid Bayes nets --- gtsam/hybrid/HybridBayesNet.cpp | 25 ++++------- gtsam/hybrid/HybridBayesNet.h | 4 +- gtsam/hybrid/tests/testHybridBayesNet.cpp | 51 ++++++++++------------- 3 files changed, 33 insertions(+), 47 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 12b56ece8..6024add07 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -235,30 +235,23 @@ VectorValues HybridBayesNet::optimize(const DiscreteValues &assignment) const { /* ************************************************************************* */ double HybridBayesNet::evaluate(const HybridValues &values) const { - const DiscreteValues& discreteValues = values.discrete(); - const VectorValues& continuosValues = values.continuous(); + const DiscreteValues &discreteValues = values.discrete(); + const VectorValues &continuousValues = values.continuous(); double probability = 1.0; // Iterate over each conditional. for (auto &&conditional : *this) { if (conditional->isHybrid()) { - // If conditional is hybrid, select based on assignment and compute error. - // GaussianMixture::shared_ptr gm = conditional->asMixture(); - // AlgebraicDecisionTree conditional_error = - // gm->error(continuousValues); - - // error_tree = error_tree + conditional_error; - + // If conditional is hybrid, select based on assignment and evaluate. + const GaussianMixture::shared_ptr gm = conditional->asMixture(); + const auto conditional = (*gm)(discreteValues); + probability *= conditional->evaluate(continuousValues); } else if (conditional->isContinuous()) { - // If continuous only, get the (double) error - // and add it to the error_tree - // double error = conditional->asGaussian()->error(continuousValues); - // // Add the computed error to every leaf of the error tree. - // error_tree = error_tree.apply( - // [error](double leaf_value) { return leaf_value + error; }); + // If continuous only, evaluate the probability and multiply. + probability *= conditional->asGaussian()->evaluate(continuousValues); } else if (conditional->isDiscrete()) { - // Conditional is discrete-only, we skip. + // Conditional is discrete-only, so return its probability. probability *= conditional->asDiscreteConditional()->operator()(discreteValues); } diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index ff13ca1b7..4e41cb11d 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -95,10 +95,10 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { */ GaussianBayesNet choose(const DiscreteValues &assignment) const; - //** evaluate for given HybridValues */ + /// Evaluate hybrid probability density for given HybridValues. 
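+  /// The result is the product of the conditional probabilities of the discrete
+  /// variables and the densities of the Gaussian conditionals, with mixture
+  /// components selected by the discrete assignment in `values`.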
double evaluate(const HybridValues &values) const; - //** (Preferred) sugar for the above for given DiscreteValues */ + /// Evaluate hybrid probability density for given HybridValues, sugar. double operator()(const HybridValues &values) const { return evaluate(values); } diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 8f9632ba3..f90152abe 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -72,51 +72,44 @@ TEST(HybridBayesNet, evaluatePureDiscrete) { } /* ****************************************************************************/ -// Test evaluate for a hybrid Bayes net P(X1|Asia) P(Asia). +// Test evaluate for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia). TEST(HybridBayesNet, evaluateHybrid) { - HybridBayesNet bayesNet; + const auto continuousConditional = GaussianConditional::FromMeanAndStddev( + X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0); - SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector2(1.0, 0.34)); + const SharedDiagonal model0 = noiseModel::Diagonal::Sigmas(Vector1(2.0)), + model1 = noiseModel::Diagonal::Sigmas(Vector1(3.0)); - const Vector2 d0(1, 2); - Matrix22 R0 = Matrix22::Ones(); - const auto conditional0 = - boost::make_shared(X(1), d0, R0, model); - - const Vector2 d1(2, 1); - Matrix22 R1 = Matrix22::Ones(); - const auto conditional1 = - boost::make_shared(X(1), d1, R1, model); + const auto conditional0 = boost::make_shared( + X(1), Vector1::Constant(5), I_1x1, model0), + conditional1 = boost::make_shared( + X(1), Vector1::Constant(2), I_1x1, model1); // TODO(dellaert): creating and adding mixture is clumsy. - std::vector conditionals{conditional0, - conditional1}; - const auto mixture = - GaussianMixture::FromConditionals({X(1)}, {}, {Asia}, conditionals); + const auto mixture = GaussianMixture::FromConditionals( + {X(1)}, {}, {Asia}, {conditional0, conditional1}); + + // Create hybrid Bayes net. + HybridBayesNet bayesNet; + bayesNet.push_back(HybridConditional( + boost::make_shared(continuousConditional))); bayesNet.push_back( HybridConditional(boost::make_shared(mixture))); - - // Add component probabilities. bayesNet.add(Asia, "99/1"); // Create values at which to evaluate. HybridValues values; values.insert(asiaKey, 0); - values.insert(X(1), Vector2(1, 2)); + values.insert(X(0), Vector1(-6)); + values.insert(X(1), Vector1(1)); - // TODO(dellaert): we need real probabilities! 
const double conditionalProbability = - conditional0->error(values.continuous()); - EXPECT_DOUBLES_EQUAL(conditionalProbability * 0.99, bayesNet.evaluate(values), 1e-9); + continuousConditional.evaluate(values.continuous()); + const double mixtureProbability = conditional0->evaluate(values.continuous()); + EXPECT_DOUBLES_EQUAL(conditionalProbability * mixtureProbability * 0.99, + bayesNet.evaluate(values), 1e-9); } - // const Vector2 d1(2, 1); - // Matrix22 R1 = Matrix22::Ones(); - // Matrix22 S1 = Matrix22::Identity() * 2; - // const auto conditional1 = - // boost::make_shared(X(1), d1, R1, X(2), S1, model); - - /* ****************************************************************************/ // Test choosing an assignment of conditionals TEST(HybridBayesNet, Choose) { From c984a5ffa251384b90efb337b6f982b68eacd30e Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 28 Dec 2022 13:52:40 -0500 Subject: [PATCH 15/41] Switch to dynamic pointer cast --- gtsam/hybrid/HybridConditional.h | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index 050f10290..db03ba59c 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -131,34 +131,29 @@ class GTSAM_EXPORT HybridConditional /** * @brief Return HybridConditional as a GaussianMixture - * - * @return GaussianMixture::shared_ptr + * @return nullptr if not a mixture + * @return GaussianMixture::shared_ptr otherwise */ GaussianMixture::shared_ptr asMixture() { - if (!isHybrid()) throw std::invalid_argument("Not a mixture"); - return boost::static_pointer_cast(inner_); + return boost::dynamic_pointer_cast(inner_); } /** * @brief Return HybridConditional as a GaussianConditional - * - * @return GaussianConditional::shared_ptr + * @return nullptr if not a GaussianConditional + * @return GaussianConditional::shared_ptr otherwise */ GaussianConditional::shared_ptr asGaussian() { - if (!isContinuous()) - throw std::invalid_argument("Not a continuous conditional"); - return boost::static_pointer_cast(inner_); + return boost::dynamic_pointer_cast(inner_); } /** * @brief Return conditional as a DiscreteConditional - * + * @return nullptr if not a DiscreteConditional * @return DiscreteConditional::shared_ptr */ - DiscreteConditional::shared_ptr asDiscreteConditional() { - if (!isDiscrete()) - throw std::invalid_argument("Not a discrete conditional"); - return boost::static_pointer_cast(inner_); + DiscreteConditional::shared_ptr asDiscrete() { + return boost::dynamic_pointer_cast(inner_); } /// @} From 1134d1c88e1fb28daa50a3467b7c97408a1ab7b5 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 28 Dec 2022 13:52:59 -0500 Subject: [PATCH 16/41] Refactor with uniform dynamic pointer cast API --- gtsam/hybrid/HybridBayesNet.cpp | 64 ++++++++----------- gtsam/hybrid/HybridBayesTree.cpp | 4 +- gtsam/hybrid/tests/testHybridBayesNet.cpp | 6 +- .../tests/testHybridGaussianFactorGraph.cpp | 2 +- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 9 ++- .../hybrid/tests/testHybridNonlinearISAM.cpp | 9 ++- 6 files changed, 39 insertions(+), 55 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 6024add07..c598b7d62 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -36,7 +36,7 @@ DecisionTreeFactor::shared_ptr HybridBayesNet::discreteConditionals() const { for (auto &&conditional : *this) { if (conditional->isDiscrete()) { // Convert to a 
DecisionTreeFactor and add it to the main factor. - DecisionTreeFactor f(*conditional->asDiscreteConditional()); + DecisionTreeFactor f(*conditional->asDiscrete()); dtFactor = dtFactor * f; } } @@ -108,7 +108,7 @@ void HybridBayesNet::updateDiscreteConditionals( HybridConditional::shared_ptr conditional = this->at(i); if (conditional->isDiscrete()) { // std::cout << demangle(typeid(conditional).name()) << std::endl; - auto discrete = conditional->asDiscreteConditional(); + auto discrete = conditional->asDiscrete(); KeyVector frontals(discrete->frontals().begin(), discrete->frontals().end()); @@ -151,13 +151,10 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) { // Go through all the conditionals in the // Bayes Net and prune them as per decisionTree. for (auto &&conditional : *this) { - if (conditional->isHybrid()) { - GaussianMixture::shared_ptr gaussianMixture = conditional->asMixture(); - + if (auto gm = conditional->asMixture()) { // Make a copy of the Gaussian mixture and prune it! - auto prunedGaussianMixture = - boost::make_shared(*gaussianMixture); - prunedGaussianMixture->prune(*decisionTree); + auto prunedGaussianMixture = boost::make_shared(*gm); + prunedGaussianMixture->prune(*decisionTree); // imperative :-( // Type-erase and add to the pruned Bayes Net fragment. prunedBayesNetFragment.push_back( @@ -184,7 +181,7 @@ GaussianConditional::shared_ptr HybridBayesNet::atGaussian(size_t i) const { /* ************************************************************************* */ DiscreteConditional::shared_ptr HybridBayesNet::atDiscrete(size_t i) const { - return at(i)->asDiscreteConditional(); + return at(i)->asDiscrete(); } /* ************************************************************************* */ @@ -192,16 +189,13 @@ GaussianBayesNet HybridBayesNet::choose( const DiscreteValues &assignment) const { GaussianBayesNet gbn; for (auto &&conditional : *this) { - if (conditional->isHybrid()) { + if (auto gm = conditional->asMixture()) { // If conditional is hybrid, select based on assignment. - GaussianMixture gm = *conditional->asMixture(); - gbn.push_back(gm(assignment)); - - } else if (conditional->isContinuous()) { + gbn.push_back((*gm)(assignment)); + } else if (auto gc = conditional->asGaussian()) { // If continuous only, add Gaussian conditional. - gbn.push_back((conditional->asGaussian())); - - } else if (conditional->isDiscrete()) { + gbn.push_back(gc); + } else if (auto dc = conditional->asDiscrete()) { // If conditional is discrete-only, we simply continue. continue; } @@ -216,7 +210,7 @@ HybridValues HybridBayesNet::optimize() const { DiscreteBayesNet discrete_bn; for (auto &&conditional : *this) { if (conditional->isDiscrete()) { - discrete_bn.push_back(conditional->asDiscreteConditional()); + discrete_bn.push_back(conditional->asDiscrete()); } } @@ -238,26 +232,23 @@ double HybridBayesNet::evaluate(const HybridValues &values) const { const DiscreteValues &discreteValues = values.discrete(); const VectorValues &continuousValues = values.continuous(); - double probability = 1.0; + double logDensity = 0.0, probability = 1.0; // Iterate over each conditional. for (auto &&conditional : *this) { - if (conditional->isHybrid()) { - // If conditional is hybrid, select based on assignment and evaluate. 
- const GaussianMixture::shared_ptr gm = conditional->asMixture(); - const auto conditional = (*gm)(discreteValues); - probability *= conditional->evaluate(continuousValues); - } else if (conditional->isContinuous()) { + if (auto gm = conditional->asMixture()) { + const auto component = (*gm)(discreteValues); + logDensity += component->logDensity(continuousValues); + } else if (auto gc = conditional->asGaussian()) { // If continuous only, evaluate the probability and multiply. - probability *= conditional->asGaussian()->evaluate(continuousValues); - } else if (conditional->isDiscrete()) { + logDensity += gc->logDensity(continuousValues); + } else if (auto dc = conditional->asDiscrete()) { // Conditional is discrete-only, so return its probability. - probability *= - conditional->asDiscreteConditional()->operator()(discreteValues); + probability *= dc->operator()(discreteValues); } } - return probability; + return probability * exp(logDensity); } /* ************************************************************************* */ @@ -267,7 +258,7 @@ HybridValues HybridBayesNet::sample(const HybridValues &given, for (auto &&conditional : *this) { if (conditional->isDiscrete()) { // If conditional is discrete-only, we add to the discrete Bayes net. - dbn.push_back(conditional->asDiscreteConditional()); + dbn.push_back(conditional->asDiscrete()); } } // Sample a discrete assignment. @@ -309,23 +300,20 @@ AlgebraicDecisionTree HybridBayesNet::error( // Iterate over each conditional. for (auto &&conditional : *this) { - if (conditional->isHybrid()) { + if (auto gm = conditional->asMixture()) { // If conditional is hybrid, select based on assignment and compute error. - GaussianMixture::shared_ptr gm = conditional->asMixture(); AlgebraicDecisionTree conditional_error = gm->error(continuousValues); error_tree = error_tree + conditional_error; - - } else if (conditional->isContinuous()) { + } else if (auto gc = conditional->asGaussian()) { // If continuous only, get the (double) error // and add it to the error_tree - double error = conditional->asGaussian()->error(continuousValues); + double error = gc->error(continuousValues); // Add the computed error to every leaf of the error tree. error_tree = error_tree.apply( [error](double leaf_value) { return leaf_value + error; }); - - } else if (conditional->isDiscrete()) { + } else if (auto dc = conditional->asDiscrete()) { // Conditional is discrete-only, we skip. 
continue; } diff --git a/gtsam/hybrid/HybridBayesTree.cpp b/gtsam/hybrid/HybridBayesTree.cpp index 8fdedab44..ed70a0aa9 100644 --- a/gtsam/hybrid/HybridBayesTree.cpp +++ b/gtsam/hybrid/HybridBayesTree.cpp @@ -49,7 +49,7 @@ HybridValues HybridBayesTree::optimize() const { // The root should be discrete only, we compute the MPE if (root_conditional->isDiscrete()) { - dbn.push_back(root_conditional->asDiscreteConditional()); + dbn.push_back(root_conditional->asDiscrete()); mpe = DiscreteFactorGraph(dbn).optimize(); } else { throw std::runtime_error( @@ -147,7 +147,7 @@ VectorValues HybridBayesTree::optimize(const DiscreteValues& assignment) const { /* ************************************************************************* */ void HybridBayesTree::prune(const size_t maxNrLeaves) { auto decisionTree = - this->roots_.at(0)->conditional()->asDiscreteConditional(); + this->roots_.at(0)->conditional()->asDiscrete(); DecisionTreeFactor prunedDecisionTree = decisionTree->prune(maxNrLeaves); decisionTree->root_ = prunedDecisionTree.root_; diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index f90152abe..d22087f47 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -317,8 +317,7 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) { EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/, prunedDecisionTree->nrLeaves()); - auto original_discrete_conditionals = - *(hybridBayesNet->at(4)->asDiscreteConditional()); + auto original_discrete_conditionals = *(hybridBayesNet->at(4)->asDiscrete()); // Prune! hybridBayesNet->prune(maxNrLeaves); @@ -338,8 +337,7 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) { }; // Get the pruned discrete conditionals as an AlgebraicDecisionTree - auto pruned_discrete_conditionals = - hybridBayesNet->at(4)->asDiscreteConditional(); + auto pruned_discrete_conditionals = hybridBayesNet->at(4)->asDiscrete(); auto discrete_conditional_tree = boost::dynamic_pointer_cast( pruned_discrete_conditionals); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 7877461b6..55e4c28ad 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -133,7 +133,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { auto result = hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {M(1)})); - auto dc = result->at(2)->asDiscreteConditional(); + auto dc = result->at(2)->asDiscrete(); DiscreteValues dv; dv[M(1)] = 0; EXPECT_DOUBLES_EQUAL(1, dc->operator()(dv), 1e-3); diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index 18ce7f10e..11bd3b415 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -111,8 +111,7 @@ TEST(HybridGaussianElimination, IncrementalInference) { // Run update step isam.update(graph1); - auto discreteConditional_m0 = - isam[M(0)]->conditional()->asDiscreteConditional(); + auto discreteConditional_m0 = isam[M(0)]->conditional()->asDiscrete(); EXPECT(discreteConditional_m0->keys() == KeyVector({M(0)})); /********************************************************/ @@ -170,10 +169,10 @@ TEST(HybridGaussianElimination, IncrementalInference) { DiscreteValues m00; m00[M(0)] = 0, m00[M(1)] = 0; DiscreteConditional decisionTree = - *(*discreteBayesTree)[M(1)]->conditional()->asDiscreteConditional(); + 
*(*discreteBayesTree)[M(1)]->conditional()->asDiscrete(); double m00_prob = decisionTree(m00); - auto discreteConditional = isam[M(1)]->conditional()->asDiscreteConditional(); + auto discreteConditional = isam[M(1)]->conditional()->asDiscrete(); // Test if the probability values are as expected with regression tests. DiscreteValues assignment; @@ -535,7 +534,7 @@ TEST(HybridGaussianISAM, NonTrivial) { // The final discrete graph should not be empty since we have eliminated // all continuous variables. - auto discreteTree = inc[M(3)]->conditional()->asDiscreteConditional(); + auto discreteTree = inc[M(3)]->conditional()->asDiscrete(); EXPECT_LONGS_EQUAL(3, discreteTree->size()); // Test if the optimal discrete mode assignment is (1, 1, 1). diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index 3bdb5ed1e..8801a8946 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -124,8 +124,7 @@ TEST(HybridNonlinearISAM, IncrementalInference) { isam.update(graph1, initial); HybridGaussianISAM bayesTree = isam.bayesTree(); - auto discreteConditional_m0 = - bayesTree[M(0)]->conditional()->asDiscreteConditional(); + auto discreteConditional_m0 = bayesTree[M(0)]->conditional()->asDiscrete(); EXPECT(discreteConditional_m0->keys() == KeyVector({M(0)})); /********************************************************/ @@ -187,11 +186,11 @@ TEST(HybridNonlinearISAM, IncrementalInference) { DiscreteValues m00; m00[M(0)] = 0, m00[M(1)] = 0; DiscreteConditional decisionTree = - *(*discreteBayesTree)[M(1)]->conditional()->asDiscreteConditional(); + *(*discreteBayesTree)[M(1)]->conditional()->asDiscrete(); double m00_prob = decisionTree(m00); auto discreteConditional = - bayesTree[M(1)]->conditional()->asDiscreteConditional(); + bayesTree[M(1)]->conditional()->asDiscrete(); // Test if the probability values are as expected with regression tests. DiscreteValues assignment; @@ -558,7 +557,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { // The final discrete graph should not be empty since we have eliminated // all continuous variables. - auto discreteTree = bayesTree[M(3)]->conditional()->asDiscreteConditional(); + auto discreteTree = bayesTree[M(3)]->conditional()->asDiscrete(); EXPECT_LONGS_EQUAL(3, discreteTree->size()); // Test if the optimal discrete mode assignment is (1, 1, 1). From d5378679811d005db8f1007723f22a34c273931a Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 28 Dec 2022 15:28:50 -0500 Subject: [PATCH 17/41] Relaxed test --- gtsam/linear/tests/testGaussianBayesNet.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/linear/tests/testGaussianBayesNet.cpp b/gtsam/linear/tests/testGaussianBayesNet.cpp index 15f0d098a..771a24631 100644 --- a/gtsam/linear/tests/testGaussianBayesNet.cpp +++ b/gtsam/linear/tests/testGaussianBayesNet.cpp @@ -206,14 +206,14 @@ TEST(GaussianBayesNet, MonteCarloIntegration) { gbn.push_back(noisyBayesNet.at(1)); double sum = 0.0; - constexpr size_t N = 500; + constexpr size_t N = 1000; // loop for N samples: for (size_t i = 0; i < N; i++) { const auto X_i = gbn.sample(); sum += pow(X_i[_y_].x() - 5.0, 2.0); } // Expected is variance = 3*3 - EXPECT_DOUBLES_EQUAL(9.0, sum / N, 0.1); + EXPECT_DOUBLES_EQUAL(9.0, sum / N, 0.5); // Pretty high. 
} /* ************************************************************************* */ From 356b89a165c274c50cf6aae4d9fd9cd48ec1ed4d Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 28 Dec 2022 16:51:24 -0500 Subject: [PATCH 18/41] Fix to const & --- gtsam/linear/GaussianBayesNet.cpp | 3 ++- gtsam/linear/GaussianBayesNet.h | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/gtsam/linear/GaussianBayesNet.cpp b/gtsam/linear/GaussianBayesNet.cpp index 4c1338435..52dc49347 100644 --- a/gtsam/linear/GaussianBayesNet.cpp +++ b/gtsam/linear/GaussianBayesNet.cpp @@ -46,7 +46,8 @@ namespace gtsam { return optimize(solution); } - VectorValues GaussianBayesNet::optimize(VectorValues solution) const { + VectorValues GaussianBayesNet::optimize(const VectorValues& given) const { + VectorValues solution = given; // (R*x)./sigmas = y by solving x=inv(R)*(y.*sigmas) // solve each node in reverse topological sort order (parents first) for (auto cg : boost::adaptors::reverse(*this)) { diff --git a/gtsam/linear/GaussianBayesNet.h b/gtsam/linear/GaussianBayesNet.h index 426d3bd83..c8a28e075 100644 --- a/gtsam/linear/GaussianBayesNet.h +++ b/gtsam/linear/GaussianBayesNet.h @@ -113,7 +113,7 @@ namespace gtsam { VectorValues optimize() const; /// Version of optimize for incomplete BayesNet, given missing variables - VectorValues optimize(const VectorValues given) const; + VectorValues optimize(const VectorValues& given) const; /** * Sample using ancestral sampling From 0495f81104ad32d8c97a32364779812e6c031102 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 28 Dec 2022 16:51:40 -0500 Subject: [PATCH 19/41] Test for GBN::sample --- gtsam/linear/linear.i | 11 ++++++++--- python/gtsam/tests/test_GaussianBayesNet.py | 19 +++++++++++++------ 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/gtsam/linear/linear.i b/gtsam/linear/linear.i index fdf156ff9..f5857b0c5 100644 --- a/gtsam/linear/linear.i +++ b/gtsam/linear/linear.i @@ -490,6 +490,8 @@ virtual class GaussianConditional : gtsam::JacobianFactor { bool equals(const gtsam::GaussianConditional& cg, double tol) const; // Standard Interface + double evaluate(const gtsam::VectorValues& x) const; + double logDensity(const gtsam::VectorValues& x) const; gtsam::Key firstFrontalKey() const; gtsam::VectorValues solve(const gtsam::VectorValues& parents) const; gtsam::JacobianFactor* likelihood( @@ -543,17 +545,20 @@ virtual class GaussianBayesNet { bool equals(const gtsam::GaussianBayesNet& other, double tol) const; size_t size() const; - // Standard interface void push_back(gtsam::GaussianConditional* conditional); void push_back(const gtsam::GaussianBayesNet& bayesNet); gtsam::GaussianConditional* front() const; gtsam::GaussianConditional* back() const; + // Standard interface + double evaluate(const gtsam::VectorValues& x) const; + double logDensity(const gtsam::VectorValues& x) const; + gtsam::VectorValues optimize() const; - gtsam::VectorValues optimize(gtsam::VectorValues given) const; + gtsam::VectorValues optimize(const gtsam::VectorValues& given) const; gtsam::VectorValues optimizeGradientSearch() const; - gtsam::VectorValues sample(gtsam::VectorValues given) const; + gtsam::VectorValues sample(const gtsam::VectorValues& given) const; gtsam::VectorValues sample() const; gtsam::VectorValues backSubstitute(const gtsam::VectorValues& gx) const; gtsam::VectorValues backSubstituteTranspose(const gtsam::VectorValues& gx) const; diff --git a/python/gtsam/tests/test_GaussianBayesNet.py b/python/gtsam/tests/test_GaussianBayesNet.py 
index e4d396cfe..022de8c3f 100644 --- a/python/gtsam/tests/test_GaussianBayesNet.py +++ b/python/gtsam/tests/test_GaussianBayesNet.py @@ -29,8 +29,7 @@ def smallBayesNet(): """Create a small Bayes Net for testing""" bayesNet = GaussianBayesNet() I_1x1 = np.eye(1, dtype=float) - bayesNet.push_back(GaussianConditional( - _x_, [9.0], I_1x1, _y_, I_1x1)) + bayesNet.push_back(GaussianConditional(_x_, [9.0], I_1x1, _y_, I_1x1)) bayesNet.push_back(GaussianConditional(_y_, [5.0], I_1x1)) return bayesNet @@ -41,13 +40,21 @@ class TestGaussianBayesNet(GtsamTestCase): def test_matrix(self): """Test matrix method""" R, d = smallBayesNet().matrix() # get matrix and RHS - R1 = np.array([ - [1.0, 1.0], - [0.0, 1.0]]) + R1 = np.array([[1.0, 1.0], [0.0, 1.0]]) d1 = np.array([9.0, 5.0]) np.testing.assert_equal(R, R1) np.testing.assert_equal(d, d1) + def test_sample(self): + """Test sample method""" + bayesNet = smallBayesNet() + sample = bayesNet.sample() + self.assertIsInstance(sample, gtsam.VectorValues) -if __name__ == '__main__': + # standard deviation is 1.0 for both, so we set tolerance to 3*sigma + mean = bayesNet.optimize() + self.gtsamAssertEquals(sample, mean, tol=3.0) + + +if __name__ == "__main__": unittest.main() From d9511d6dc2fe746e661ca5b9050cf737ad2d7004 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 28 Dec 2022 17:47:41 -0500 Subject: [PATCH 20/41] Convenience constructors --- gtsam/hybrid/HybridBayesNet.h | 21 ++++++++++++++++++--- gtsam/hybrid/tests/testHybridBayesNet.cpp | 19 +++++++------------ 2 files changed, 25 insertions(+), 15 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 4e41cb11d..488ee0d14 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -69,10 +69,25 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { /// Add HybridConditional to Bayes Net using Base::add; + /// Add a Gaussian Mixture to the Bayes Net. + template + void addMixture(T &&...args) { + push_back(HybridConditional( + boost::make_shared(std::forward(args)...))); + } + + /// Add a Gaussian conditional to the Bayes Net. + template + void addGaussian(T &&...args) { + push_back(HybridConditional( + boost::make_shared(std::forward(args)...))); + } + /// Add a discrete conditional to the Bayes Net. - void add(const DiscreteKey &key, const std::string &table) { - push_back( - HybridConditional(boost::make_shared(key, table))); + template + void addDiscrete(T &&...args) { + push_back(HybridConditional( + boost::make_shared(std::forward(args)...))); } using Base::push_back; diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index d22087f47..8c887a2aa 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -43,7 +43,7 @@ static const DiscreteKey Asia(asiaKey, 2); // Test creation of a pure discrete Bayes net. TEST(HybridBayesNet, Creation) { HybridBayesNet bayesNet; - bayesNet.add(Asia, "99/1"); + bayesNet.addDiscrete(Asia, "99/1"); DiscreteConditional expected(Asia, "99/1"); CHECK(bayesNet.atDiscrete(0)); @@ -54,7 +54,7 @@ TEST(HybridBayesNet, Creation) { // Test adding a Bayes net to another one. TEST(HybridBayesNet, Add) { HybridBayesNet bayesNet; - bayesNet.add(Asia, "99/1"); + bayesNet.addDiscrete(Asia, "99/1"); HybridBayesNet other; other.push_back(bayesNet); @@ -65,7 +65,7 @@ TEST(HybridBayesNet, Add) { // Test evaluate for a pure discrete Bayes net P(Asia). 
TEST(HybridBayesNet, evaluatePureDiscrete) { HybridBayesNet bayesNet; - bayesNet.add(Asia, "99/1"); + bayesNet.addDiscrete(Asia, "99/1"); HybridValues values; values.insert(asiaKey, 0); EXPECT_DOUBLES_EQUAL(0.99, bayesNet.evaluate(values), 1e-9); @@ -85,17 +85,12 @@ TEST(HybridBayesNet, evaluateHybrid) { conditional1 = boost::make_shared( X(1), Vector1::Constant(2), I_1x1, model1); - // TODO(dellaert): creating and adding mixture is clumsy. - const auto mixture = GaussianMixture::FromConditionals( - {X(1)}, {}, {Asia}, {conditional0, conditional1}); - // Create hybrid Bayes net. HybridBayesNet bayesNet; - bayesNet.push_back(HybridConditional( - boost::make_shared(continuousConditional))); - bayesNet.push_back( - HybridConditional(boost::make_shared(mixture))); - bayesNet.add(Asia, "99/1"); + bayesNet.addGaussian(continuousConditional); + bayesNet.addMixture(GaussianMixture::FromConditionals( + {X(1)}, {}, {Asia}, {conditional0, conditional1})); + bayesNet.addDiscrete(Asia, "99/1"); // Create values at which to evaluate. HybridValues values; From 1de49598afd398fc061f095bd8f7a6848b75e741 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 28 Dec 2022 18:05:46 -0500 Subject: [PATCH 21/41] Add methods in HybridBayesNet --- gtsam/hybrid/hybrid.i | 16 +++++++++++++++- python/gtsam/preamble/hybrid.h | 1 + python/gtsam/specializations/hybrid.h | 4 +++- 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 899c129e0..acda94638 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -87,7 +87,6 @@ class HybridBayesTreeClique { // double evaluate(const gtsam::HybridValues& values) const; }; -#include class HybridBayesTree { HybridBayesTree(); void print(string s = "HybridBayesTree\n", @@ -105,14 +104,29 @@ class HybridBayesTree { gtsam::DefaultKeyFormatter) const; }; +#include class HybridBayesNet { HybridBayesNet(); void add(const gtsam::HybridConditional& s); + void addMixture(const gtsam::GaussianMixture& s); + void addGaussian(const gtsam::GaussianConditional& s); + void addDiscrete(const gtsam::DiscreteConditional& s); + void addDiscrete(const gtsam::DiscreteKey& key, string spec); + void addDiscrete(const gtsam::DiscreteKey& key, + const gtsam::DiscreteKeys& parents, string spec); + void addDiscrete(const gtsam::DiscreteKey& key, + const std::vector& parents, string spec); + bool empty() const; size_t size() const; gtsam::KeySet keys() const; const gtsam::HybridConditional* at(size_t i) const; + + double evaluate(const gtsam::HybridValues& x) const; gtsam::HybridValues optimize() const; + gtsam::HybridValues sample(const gtsam::HybridValues &given) const; + gtsam::HybridValues sample() const; + void print(string s = "HybridBayesNet\n", const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const; diff --git a/python/gtsam/preamble/hybrid.h b/python/gtsam/preamble/hybrid.h index bae636d6a..7008a1f07 100644 --- a/python/gtsam/preamble/hybrid.h +++ b/python/gtsam/preamble/hybrid.h @@ -19,3 +19,4 @@ PYBIND11_MAKE_OPAQUE(std::vector); #endif PYBIND11_MAKE_OPAQUE(std::vector); +PYBIND11_MAKE_OPAQUE(std::vector); diff --git a/python/gtsam/specializations/hybrid.h b/python/gtsam/specializations/hybrid.h index bede6d86c..db5240117 100644 --- a/python/gtsam/specializations/hybrid.h +++ b/python/gtsam/specializations/hybrid.h @@ -1,4 +1,6 @@ py::bind_vector >(m_, "GaussianFactorVector"); - py::implicitly_convertible >(); + +py::bind_vector >(m_, "GaussianConditionalVector"); +py::implicitly_convertible >(); From 
fd12181ebe41d5243febb09af1f77c23dc4596f6 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 28 Dec 2022 18:10:00 -0500 Subject: [PATCH 22/41] Cleanup --- python/gtsam/tests/test_HybridValues.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/python/gtsam/tests/test_HybridValues.py b/python/gtsam/tests/test_HybridValues.py index 63e7c8e7d..8a54d518c 100644 --- a/python/gtsam/tests/test_HybridValues.py +++ b/python/gtsam/tests/test_HybridValues.py @@ -1,5 +1,5 @@ """ -GTSAM Copyright 2010-2019, Georgia Tech Research Corporation, +GTSAM Copyright 2010-2022, Georgia Tech Research Corporation, Atlanta, Georgia 30332-0415 All Rights Reserved @@ -20,22 +20,23 @@ from gtsam.symbol_shorthand import C, X from gtsam.utils.test_case import GtsamTestCase -class TestHybridGaussianFactorGraph(GtsamTestCase): +class TestHybridValues(GtsamTestCase): """Unit tests for HybridValues.""" def test_basic(self): - """Test contruction and basic methods of hybrid values.""" - + """Test construction and basic methods of hybrid values.""" + hv1 = gtsam.HybridValues() - hv1.insert(X(0), np.ones((3,1))) + hv1.insert(X(0), np.ones((3, 1))) hv1.insert(C(0), 2) hv2 = gtsam.HybridValues() hv2.insert(C(0), 2) - hv2.insert(X(0), np.ones((3,1))) + hv2.insert(X(0), np.ones((3, 1))) self.assertEqual(hv1.atDiscrete(C(0)), 2) - self.assertEqual(hv1.at(X(0))[0], np.ones((3,1))[0]) + self.assertEqual(hv1.at(X(0))[0], np.ones((3, 1))[0]) + if __name__ == "__main__": unittest.main() From 7c91fe82b4963389bd8cb23f224ab6617af3a5a5 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 28 Dec 2022 18:10:07 -0500 Subject: [PATCH 23/41] Add evaluate test --- python/gtsam/tests/test_HybridBayesNet.py | 66 +++++++++++++++++++++++ 1 file changed, 66 insertions(+) create mode 100644 python/gtsam/tests/test_HybridBayesNet.py diff --git a/python/gtsam/tests/test_HybridBayesNet.py b/python/gtsam/tests/test_HybridBayesNet.py new file mode 100644 index 000000000..cfe080dcb --- /dev/null +++ b/python/gtsam/tests/test_HybridBayesNet.py @@ -0,0 +1,66 @@ +""" +GTSAM Copyright 2010-2022, Georgia Tech Research Corporation, +Atlanta, Georgia 30332-0415 +All Rights Reserved + +See LICENSE for the license information + +Unit tests for Hybrid Values. +Author: Frank Dellaert +""" +# pylint: disable=invalid-name, no-name-in-module, no-member + +from __future__ import print_function + +import unittest + +import numpy as np +from gtsam.symbol_shorthand import A, X +from gtsam.utils.test_case import GtsamTestCase + +import gtsam +from gtsam import GaussianConditional, GaussianMixture, HybridBayesNet, HybridValues, noiseModel + + +class TestHybridBayesNet(GtsamTestCase): + """Unit tests for HybridValues.""" + + def test_evaluate(self): + """Test evaluate for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).""" + asiaKey = A(0) + Asia = (asiaKey, 2) + + # Create the continuous conditional + I_1x1 = np.eye(1) + gc = GaussianConditional.FromMeanAndStddev(X(0), 2 * I_1x1, X(1), [-4], 5.0) + + # Create the noise models + model0 = noiseModel.Diagonal.Sigmas([2.0]) + model1 = noiseModel.Diagonal.Sigmas([3.0]) + + # Create the conditionals + conditional0 = GaussianConditional(X(1), [5], I_1x1, model0) + conditional1 = GaussianConditional(X(1), [2], I_1x1, model1) + # gm = GaussianMixture.FromConditionals([X(1)], [], [Asia], [conditional0, conditional1]) # + + # Create hybrid Bayes net. 
+ bayesNet = HybridBayesNet() + bayesNet.addGaussian(gc) + # bayesNet.addMixture(gm) + bayesNet.addDiscrete(Asia, "99/1") + + # Create values at which to evaluate. + values = HybridValues() + values.insert(asiaKey, 0) + values.insert(X(0), [-6]) + values.insert(X(1), [1]) + + conditionalProbability = gc.evaluate(values.continuous()) + mixtureProbability = conditional0.evaluate(values.continuous()) + assert self.assertAlmostEqual( + conditionalProbability * mixtureProbability * 0.99, bayesNet.evaluate(values), places=5 + ) + + +if __name__ == "__main__": + unittest.main() From 873f5baf562eb6d789635091593090621d5f5d12 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 29 Dec 2022 08:32:43 +0530 Subject: [PATCH 24/41] remove unnecessary preamble and specializations for hybrid wrapping --- python/gtsam/preamble/hybrid.h | 9 --------- python/gtsam/specializations/hybrid.h | 5 ----- 2 files changed, 14 deletions(-) diff --git a/python/gtsam/preamble/hybrid.h b/python/gtsam/preamble/hybrid.h index 7008a1f07..661215e59 100644 --- a/python/gtsam/preamble/hybrid.h +++ b/python/gtsam/preamble/hybrid.h @@ -11,12 +11,3 @@ * mutations on Python side will not be reflected on C++. */ #include - -#ifdef GTSAM_ALLOCATOR_TBB -PYBIND11_MAKE_OPAQUE(std::vector>); -#else -PYBIND11_MAKE_OPAQUE(std::vector); -#endif - -PYBIND11_MAKE_OPAQUE(std::vector); -PYBIND11_MAKE_OPAQUE(std::vector); diff --git a/python/gtsam/specializations/hybrid.h b/python/gtsam/specializations/hybrid.h index db5240117..8b1378917 100644 --- a/python/gtsam/specializations/hybrid.h +++ b/python/gtsam/specializations/hybrid.h @@ -1,6 +1 @@ -py::bind_vector >(m_, "GaussianFactorVector"); -py::implicitly_convertible >(); - -py::bind_vector >(m_, "GaussianConditionalVector"); -py::implicitly_convertible >(); From 03baf8f75ed0aba3c971b2da5ad58460666d000d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 29 Dec 2022 08:33:14 +0530 Subject: [PATCH 25/41] formatting and fixes to test --- python/gtsam/tests/test_HybridBayesNet.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/python/gtsam/tests/test_HybridBayesNet.py b/python/gtsam/tests/test_HybridBayesNet.py index cfe080dcb..13ac3a3e2 100644 --- a/python/gtsam/tests/test_HybridBayesNet.py +++ b/python/gtsam/tests/test_HybridBayesNet.py @@ -10,8 +10,6 @@ Author: Frank Dellaert """ # pylint: disable=invalid-name, no-name-in-module, no-member -from __future__ import print_function - import unittest import numpy as np @@ -19,12 +17,12 @@ from gtsam.symbol_shorthand import A, X from gtsam.utils.test_case import GtsamTestCase import gtsam -from gtsam import GaussianConditional, GaussianMixture, HybridBayesNet, HybridValues, noiseModel +from gtsam import (DiscreteKeys, GaussianConditional, GaussianMixture, + HybridBayesNet, HybridValues, noiseModel) class TestHybridBayesNet(GtsamTestCase): """Unit tests for HybridValues.""" - def test_evaluate(self): """Test evaluate for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).""" asiaKey = A(0) @@ -32,7 +30,8 @@ class TestHybridBayesNet(GtsamTestCase): # Create the continuous conditional I_1x1 = np.eye(1) - gc = GaussianConditional.FromMeanAndStddev(X(0), 2 * I_1x1, X(1), [-4], 5.0) + gc = GaussianConditional.FromMeanAndStddev(X(0), 2 * I_1x1, X(1), [-4], + 5.0) # Create the noise models model0 = noiseModel.Diagonal.Sigmas([2.0]) @@ -41,7 +40,10 @@ class TestHybridBayesNet(GtsamTestCase): # Create the conditionals conditional0 = GaussianConditional(X(1), [5], I_1x1, model0) conditional1 = 
GaussianConditional(X(1), [2], I_1x1, model1) - # gm = GaussianMixture.FromConditionals([X(1)], [], [Asia], [conditional0, conditional1]) # + dkeys = DiscreteKeys() + dkeys.push_back(Asia) + gm = GaussianMixture.FromConditionals([X(1)], [], dkeys, + [conditional0, conditional1]) # # Create hybrid Bayes net. bayesNet = HybridBayesNet() @@ -57,9 +59,10 @@ class TestHybridBayesNet(GtsamTestCase): conditionalProbability = gc.evaluate(values.continuous()) mixtureProbability = conditional0.evaluate(values.continuous()) - assert self.assertAlmostEqual( - conditionalProbability * mixtureProbability * 0.99, bayesNet.evaluate(values), places=5 - ) + assert self.assertAlmostEqual(conditionalProbability * + mixtureProbability * 0.99, + bayesNet.evaluate(values), + places=5) if __name__ == "__main__": From f4420f2c8dbed17efe506105485a1a5572a67290 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 29 Dec 2022 08:40:46 +0530 Subject: [PATCH 26/41] add mixture to bayesnet and fix double assert bug --- python/gtsam/tests/test_HybridBayesNet.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/python/gtsam/tests/test_HybridBayesNet.py b/python/gtsam/tests/test_HybridBayesNet.py index 13ac3a3e2..66cddf05e 100644 --- a/python/gtsam/tests/test_HybridBayesNet.py +++ b/python/gtsam/tests/test_HybridBayesNet.py @@ -48,7 +48,7 @@ class TestHybridBayesNet(GtsamTestCase): # Create hybrid Bayes net. bayesNet = HybridBayesNet() bayesNet.addGaussian(gc) - # bayesNet.addMixture(gm) + bayesNet.addMixture(gm) bayesNet.addDiscrete(Asia, "99/1") # Create values at which to evaluate. @@ -59,10 +59,10 @@ class TestHybridBayesNet(GtsamTestCase): conditionalProbability = gc.evaluate(values.continuous()) mixtureProbability = conditional0.evaluate(values.continuous()) - assert self.assertAlmostEqual(conditionalProbability * - mixtureProbability * 0.99, - bayesNet.evaluate(values), - places=5) + self.assertAlmostEqual(conditionalProbability * mixtureProbability * + 0.99, + bayesNet.evaluate(values), + places=5) if __name__ == "__main__": From cc2183adb32fc0c10a9fc98895dba782dea603bd Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 29 Dec 2022 10:32:46 +0530 Subject: [PATCH 27/41] fix wrap preamble --- python/gtsam/preamble/hybrid.h | 7 +++++++ python/gtsam/specializations/hybrid.h | 1 - 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/python/gtsam/preamble/hybrid.h b/python/gtsam/preamble/hybrid.h index 661215e59..90a062d66 100644 --- a/python/gtsam/preamble/hybrid.h +++ b/python/gtsam/preamble/hybrid.h @@ -11,3 +11,10 @@ * mutations on Python side will not be reflected on C++. */ #include + +// NOTE: Needed since we are including pybind11/stl.h. 
+#ifdef GTSAM_ALLOCATOR_TBB +PYBIND11_MAKE_OPAQUE(std::vector>); +#else +PYBIND11_MAKE_OPAQUE(std::vector); +#endif diff --git a/python/gtsam/specializations/hybrid.h b/python/gtsam/specializations/hybrid.h index 8b1378917..e69de29bb 100644 --- a/python/gtsam/specializations/hybrid.h +++ b/python/gtsam/specializations/hybrid.h @@ -1 +0,0 @@ - From 1eb6fc77a0e22d004c4cf0288d0e3eddd8fed192 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 29 Dec 2022 10:33:33 +0530 Subject: [PATCH 28/41] fix formatting and other issues --- python/gtsam/tests/test_HybridFactorGraph.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index 576efa82f..37bb5b93c 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -10,21 +10,19 @@ Author: Fan Jiang """ # pylint: disable=invalid-name, no-name-in-module, no-member -from __future__ import print_function - import unittest -import gtsam import numpy as np from gtsam.symbol_shorthand import C, X from gtsam.utils.test_case import GtsamTestCase +import gtsam + class TestHybridGaussianFactorGraph(GtsamTestCase): """Unit tests for HybridGaussianFactorGraph.""" - def test_create(self): - """Test contruction of hybrid factor graph.""" + """Test construction of hybrid factor graph.""" noiseModel = gtsam.noiseModel.Unit.Create(3) dk = gtsam.DiscreteKeys() dk.push_back((C(0), 2)) @@ -45,7 +43,6 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): gtsam.Ordering.ColamdConstrainedLastHybridGaussianFactorGraph( hfg, [C(0)])) - # print("hbn = ", hbn) self.assertEqual(hbn.size(), 2) mixture = hbn.at(0).inner() @@ -56,7 +53,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): self.assertIsInstance(discrete_conditional, gtsam.DiscreteConditional) def test_optimize(self): - """Test contruction of hybrid factor graph.""" + """Test construction of hybrid factor graph.""" noiseModel = gtsam.noiseModel.Unit.Create(3) dk = gtsam.DiscreteKeys() dk.push_back((C(0), 2)) @@ -73,16 +70,16 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): hfg.add(jf2) hfg.push_back(gmf) - dtf = gtsam.DecisionTreeFactor([(C(0), 2)],"0 1") + dtf = gtsam.DecisionTreeFactor([(C(0), 2)], "0 1") hfg.add(dtf) hbn = hfg.eliminateSequential( gtsam.Ordering.ColamdConstrainedLastHybridGaussianFactorGraph( hfg, [C(0)])) - # print("hbn = ", hbn) hv = hbn.optimize() self.assertEqual(hv.atDiscrete(C(0)), 1) + if __name__ == "__main__": unittest.main() From 1659c418e154bfb85a3a7d2781aaf0374d21ec88 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 29 Dec 2022 21:31:46 +0530 Subject: [PATCH 29/41] PreintegratedAhrsMeasurements wrapped constructors --- gtsam/navigation/navigation.i | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/gtsam/navigation/navigation.i b/gtsam/navigation/navigation.i index 731cf3807..7bbef9fc5 100644 --- a/gtsam/navigation/navigation.i +++ b/gtsam/navigation/navigation.i @@ -216,7 +216,13 @@ virtual class CombinedImuFactor: gtsam::NonlinearFactor { #include class PreintegratedAhrsMeasurements { // Standard Constructor - PreintegratedAhrsMeasurements(Vector bias, Matrix measuredOmegaCovariance); + PreintegratedAhrsMeasurements(const gtsam::PreintegrationParams* params, + const Vector& biasHat); + PreintegratedAhrsMeasurements(const gtsam::PreintegrationParams* p, + const Vector& bias_hat, double deltaTij, + const gtsam::Rot3& deltaRij, + const Matrix& delRdelBiasOmega, + const 
Matrix& preint_meas_cov); PreintegratedAhrsMeasurements(const gtsam::PreintegratedAhrsMeasurements& rhs); // Testable From a6b90023f38f0f8b5650c404401333a7d0c8eb42 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 29 Dec 2022 11:54:59 -0500 Subject: [PATCH 30/41] Added zero parents FromMeanAndStddev --- gtsam/linear/GaussianConditional.cpp | 15 +++++++++++++-- gtsam/linear/GaussianConditional.h | 9 +++++++-- gtsam/linear/linear.i | 4 ++++ 3 files changed, 24 insertions(+), 4 deletions(-) diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index 5e8a193cf..7cdff914f 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -63,13 +63,24 @@ namespace gtsam { : BaseFactor(key, R, parent1, S, parent2, T, d, sigmas), BaseConditional(1) {} + /* ************************************************************************ */ + GaussianConditional GaussianConditional::FromMeanAndStddev(Key key, + const Vector& mu, + double sigma) { + // |Rx - d| = |x-(Ay + b)|/sigma + const Matrix R = Matrix::Identity(mu.size(), mu.size()); + const Vector& d = mu; + return GaussianConditional(key, d, R, + noiseModel::Isotropic::Sigma(mu.size(), sigma)); + } + /* ************************************************************************ */ GaussianConditional GaussianConditional::FromMeanAndStddev( Key key, const Matrix& A, Key parent, const Vector& b, double sigma) { // |Rx + Sy - d| = |x-(Ay + b)|/sigma const Matrix R = Matrix::Identity(b.size(), b.size()); const Matrix S = -A; - const Vector d = b; + const Vector& d = b; return GaussianConditional(key, d, R, parent, S, noiseModel::Isotropic::Sigma(b.size(), sigma)); } @@ -82,7 +93,7 @@ namespace gtsam { const Matrix R = Matrix::Identity(b.size(), b.size()); const Matrix S = -A1; const Matrix T = -A2; - const Vector d = b; + const Vector& d = b; return GaussianConditional(key, d, R, parent1, S, parent2, T, noiseModel::Isotropic::Sigma(b.size(), sigma)); } diff --git a/gtsam/linear/GaussianConditional.h b/gtsam/linear/GaussianConditional.h index a72a73973..af1c5d80e 100644 --- a/gtsam/linear/GaussianConditional.h +++ b/gtsam/linear/GaussianConditional.h @@ -84,12 +84,17 @@ namespace gtsam { const KEYS& keys, size_t nrFrontals, const VerticalBlockMatrix& augmentedMatrix, const SharedDiagonal& sigmas = SharedDiagonal()); - /// Construct from mean A1 p1 + b and standard deviation. + /// Construct from mean `mu` and standard deviation `sigma`. + static GaussianConditional FromMeanAndStddev(Key key, const Vector& mu, + double sigma); + + /// Construct from conditional mean `A1 p1 + b` and standard deviation. static GaussianConditional FromMeanAndStddev(Key key, const Matrix& A, Key parent, const Vector& b, double sigma); - /// Construct from mean A1 p1 + A2 p2 + b and standard deviation. + /// Construct from conditional mean `A1 p1 + A2 p2 + b` and standard + /// deviation `sigma`. 
static GaussianConditional FromMeanAndStddev(Key key, // const Matrix& A1, Key parent1, const Matrix& A2, Key parent2, diff --git a/gtsam/linear/linear.i b/gtsam/linear/linear.i index f5857b0c5..6f241da55 100644 --- a/gtsam/linear/linear.i +++ b/gtsam/linear/linear.i @@ -470,6 +470,10 @@ virtual class GaussianConditional : gtsam::JacobianFactor { size_t name2, Matrix T); // Named constructors + static gtsam::GaussianConditional FromMeanAndStddev(gtsam::Key key, + const Vector& mu, + double sigma); + static gtsam::GaussianConditional FromMeanAndStddev(gtsam::Key key, const Matrix& A, gtsam::Key parent, From 7d58207dae808697d9812b88f8c101f62e1184af Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 29 Dec 2022 11:55:19 -0500 Subject: [PATCH 31/41] Renamed add to emplace, add is for shared pointers --- gtsam/hybrid/HybridBayesNet.h | 21 ++++++++++++++++--- gtsam/hybrid/hybrid.i | 25 +++++++++++++++-------- gtsam/hybrid/tests/testHybridBayesNet.cpp | 12 +++++------ 3 files changed, 41 insertions(+), 17 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 488ee0d14..a64b3bb4f 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -69,23 +69,38 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { /// Add HybridConditional to Bayes Net using Base::add; + /// Add a Gaussian Mixture to the Bayes Net. + void addMixture(const GaussianMixture::shared_ptr &ptr) { + push_back(HybridConditional(ptr)); + } + + /// Add a Gaussian conditional to the Bayes Net. + void addGaussian(const GaussianConditional::shared_ptr &ptr) { + push_back(HybridConditional(ptr)); + } + + /// Add a discrete conditional to the Bayes Net. + void addDiscrete(const DiscreteConditional::shared_ptr &ptr) { + push_back(HybridConditional(ptr)); + } + /// Add a Gaussian Mixture to the Bayes Net. template - void addMixture(T &&...args) { + void emplaceMixture(T &&...args) { push_back(HybridConditional( boost::make_shared(std::forward(args)...))); } /// Add a Gaussian conditional to the Bayes Net. template - void addGaussian(T &&...args) { + void emplaceGaussian(T &&...args) { push_back(HybridConditional( boost::make_shared(std::forward(args)...))); } /// Add a discrete conditional to the Bayes Net. 
template - void addDiscrete(T &&...args) { + void emplaceDiscrete(T &&...args) { push_back(HybridConditional( boost::make_shared(std::forward(args)...))); } diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index acda94638..ab070c68f 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -108,14 +108,23 @@ class HybridBayesTree { class HybridBayesNet { HybridBayesNet(); void add(const gtsam::HybridConditional& s); - void addMixture(const gtsam::GaussianMixture& s); - void addGaussian(const gtsam::GaussianConditional& s); - void addDiscrete(const gtsam::DiscreteConditional& s); - void addDiscrete(const gtsam::DiscreteKey& key, string spec); - void addDiscrete(const gtsam::DiscreteKey& key, - const gtsam::DiscreteKeys& parents, string spec); - void addDiscrete(const gtsam::DiscreteKey& key, - const std::vector& parents, string spec); + void addMixture(const gtsam::GaussianMixture* s); + void addGaussian(const gtsam::GaussianConditional* s); + void addDiscrete(const gtsam::DiscreteConditional* s); + + void emplaceMixture(const gtsam::GaussianMixture& s); + void emplaceGaussian(const gtsam::GaussianConditional& s); + void emplaceDiscrete(const gtsam::DiscreteConditional& s); + void emplaceDiscrete(const gtsam::DiscreteKey& key, string spec); + void emplaceDiscrete(const gtsam::DiscreteKey& key, + const gtsam::DiscreteKeys& parents, string spec); + void emplaceDiscrete(const gtsam::DiscreteKey& key, + const std::vector& parents, + string spec); + + gtsam::GaussianMixture* atMixture(size_t i) const; + gtsam::GaussianConditional* atGaussian(size_t i) const; + gtsam::DiscreteConditional* atDiscrete(size_t i) const; bool empty() const; size_t size() const; diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 8c887a2aa..9a2ca1208 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -43,7 +43,7 @@ static const DiscreteKey Asia(asiaKey, 2); // Test creation of a pure discrete Bayes net. TEST(HybridBayesNet, Creation) { HybridBayesNet bayesNet; - bayesNet.addDiscrete(Asia, "99/1"); + bayesNet.emplaceDiscrete(Asia, "99/1"); DiscreteConditional expected(Asia, "99/1"); CHECK(bayesNet.atDiscrete(0)); @@ -54,7 +54,7 @@ TEST(HybridBayesNet, Creation) { // Test adding a Bayes net to another one. TEST(HybridBayesNet, Add) { HybridBayesNet bayesNet; - bayesNet.addDiscrete(Asia, "99/1"); + bayesNet.emplaceDiscrete(Asia, "99/1"); HybridBayesNet other; other.push_back(bayesNet); @@ -65,7 +65,7 @@ TEST(HybridBayesNet, Add) { // Test evaluate for a pure discrete Bayes net P(Asia). TEST(HybridBayesNet, evaluatePureDiscrete) { HybridBayesNet bayesNet; - bayesNet.addDiscrete(Asia, "99/1"); + bayesNet.emplaceDiscrete(Asia, "99/1"); HybridValues values; values.insert(asiaKey, 0); EXPECT_DOUBLES_EQUAL(0.99, bayesNet.evaluate(values), 1e-9); @@ -87,10 +87,10 @@ TEST(HybridBayesNet, evaluateHybrid) { // Create hybrid Bayes net. HybridBayesNet bayesNet; - bayesNet.addGaussian(continuousConditional); - bayesNet.addMixture(GaussianMixture::FromConditionals( + bayesNet.emplaceGaussian(continuousConditional); + bayesNet.emplaceMixture(GaussianMixture::FromConditionals( {X(1)}, {}, {Asia}, {conditional0, conditional1})); - bayesNet.addDiscrete(Asia, "99/1"); + bayesNet.emplaceDiscrete(Asia, "99/1"); // Create values at which to evaluate. 
HybridValues values; From e42805eba3dcab0864cb48164c339b3b512e850d Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 29 Dec 2022 12:22:56 -0500 Subject: [PATCH 32/41] Get rid of (redundant and undocumented) FromFactors named constructor. --- gtsam/hybrid/GaussianMixtureFactor.cpp | 9 ---- gtsam/hybrid/GaussianMixtureFactor.h | 11 ++--- gtsam/hybrid/hybrid.i | 2 +- gtsam/hybrid/tests/Switching.h | 2 +- gtsam/hybrid/tests/testGaussianMixture.cpp | 41 ++++++++++++++++++- .../tests/testHybridGaussianFactorGraph.cpp | 4 +- 6 files changed, 48 insertions(+), 21 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index fd437f52c..881a97a1b 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -38,15 +38,6 @@ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { return e != nullptr && Base::equals(*e, tol); } -/* *******************************************************************************/ -GaussianMixtureFactor GaussianMixtureFactor::FromFactors( - const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const std::vector &factors) { - Factors dt(discreteKeys, factors); - - return GaussianMixtureFactor(continuousKeys, discreteKeys, dt); -} - /* *******************************************************************************/ void GaussianMixtureFactor::print(const std::string &s, const KeyFormatter &formatter) const { diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 0b65b5aa9..b8f475de3 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -93,19 +93,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @brief Construct a new GaussianMixtureFactor object using a vector of * GaussianFactor shared pointers. * - * @param keys Vector of keys for continuous factors. + * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of gaussian factor shared pointers. 
*/ - GaussianMixtureFactor(const KeyVector &keys, const DiscreteKeys &discreteKeys, + GaussianMixtureFactor(const KeyVector &continuousKeys, + const DiscreteKeys &discreteKeys, const std::vector &factors) - : GaussianMixtureFactor(keys, discreteKeys, + : GaussianMixtureFactor(continuousKeys, discreteKeys, Factors(discreteKeys, factors)) {} - static This FromFactors( - const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const std::vector &factors); - /// @} /// @name Testable /// @{ diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index ab070c68f..2721612f9 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -54,7 +54,7 @@ virtual class HybridDiscreteFactor { #include class GaussianMixtureFactor : gtsam::HybridFactor { - static GaussianMixtureFactor FromFactors( + GaussianMixtureFactor( const gtsam::KeyVector& continuousKeys, const gtsam::DiscreteKeys& discreteKeys, const std::vector& factorsList); diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index f9e1916d0..59c57f8a0 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -57,7 +57,7 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( // keyFunc(1) to keyFunc(n+1) for (size_t t = 1; t < n; t++) { - hfg.add(GaussianMixtureFactor::FromFactors( + hfg.add(GaussianMixtureFactor( {keyFunc(t), keyFunc(t + 1)}, {{dKeyFunc(t), 2}}, {boost::make_shared(keyFunc(t), I_3x3, keyFunc(t + 1), I_3x3, Z_3x1), diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index 310081f02..fe6a57dee 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -20,6 +20,8 @@ #include #include +#include +#include #include #include @@ -33,6 +35,7 @@ using namespace gtsam; using noiseModel::Isotropic; using symbol_shorthand::M; using symbol_shorthand::X; +using symbol_shorthand::Z; /* ************************************************************************* */ /* Check construction of GaussianMixture P(x1 | x2, m1) as well as accessing a @@ -127,7 +130,43 @@ TEST(GaussianMixture, Error) { assignment[M(1)] = 0; EXPECT_DOUBLES_EQUAL(0.5, mixture.error(values, assignment), 1e-8); assignment[M(1)] = 1; - EXPECT_DOUBLES_EQUAL(4.3252595155709335, mixture.error(values, assignment), 1e-8); + EXPECT_DOUBLES_EQUAL(4.3252595155709335, mixture.error(values, assignment), + 1e-8); +} + +/* ************************************************************************* */ +// Create a likelihood factor for a Gaussian mixture, return a Mixture factor on +// the parents. +GaussianMixtureFactor::shared_ptr likelihood(const HybridValues& values) { + GaussianMixtureFactor::shared_ptr factor; + return factor; +} + +/// Check that likelihood returns a mixture factor on the parents. +TEST(GaussianMixture, Likelihood) { + // Create mode key: 0 is low-noise, 1 is high-noise. + Key modeKey = M(0); + DiscreteKey mode(modeKey, 2); + + // Create Gaussian mixture Z(0) = X(0) + noise. + // TODO(dellaert): making copies below is not ideal ! 
+ Matrix1 I = Matrix1::Identity(); + const auto conditional0 = boost::make_shared( + GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 0.5)); + const auto conditional1 = boost::make_shared( + GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3)); + const auto gm = GaussianMixture::FromConditionals( + {Z(0)}, {X(0)}, {mode}, {conditional0, conditional1}); + + // Call the likelihood function: + VectorValues measurements; + measurements.insert(Z(0), Vector1(0)); + HybridValues values(DiscreteValues(), measurements); + const auto factor = likelihood(values); + + // Check that the factor is a mixture factor on the parents. + const GaussianMixtureFactor expected = GaussianMixtureFactor(); + EXPECT(assert_equal(*factor, expected)); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 55e4c28ad..f774e8ef1 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -176,7 +176,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - hfg.add(GaussianMixtureFactor::FromFactors( + hfg.add(GaussianMixtureFactor( {X(1)}, {{M(1), 2}}, {boost::make_shared(X(1), I_3x3, Z_3x1), boost::make_shared(X(1), I_3x3, Vector3::Ones())})); @@ -235,7 +235,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1)); { - hfg.add(GaussianMixtureFactor::FromFactors( + hfg.add(GaussianMixtureFactor( {X(0)}, {{M(0), 2}}, {boost::make_shared(X(0), I_3x3, Z_3x1), boost::make_shared(X(0), I_3x3, Vector3::Ones())})); From 1e87a81d010dec84bfc6c1608a044149e7dd6a1a Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 29 Dec 2022 13:12:03 -0500 Subject: [PATCH 33/41] Made method const --- gtsam/hybrid/GaussianMixture.cpp | 2 +- gtsam/hybrid/GaussianMixture.h | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index c0815b2d7..82d16226a 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -36,7 +36,7 @@ GaussianMixture::GaussianMixture( conditionals_(conditionals) {} /* *******************************************************************************/ -const GaussianMixture::Conditionals &GaussianMixture::conditionals() { +const GaussianMixture::Conditionals &GaussianMixture::conditionals() const { return conditionals_; } diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index 88d5a02c0..a3393dbb0 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -142,7 +142,7 @@ class GTSAM_EXPORT GaussianMixture /// @} /// Getter for the underlying Conditionals DecisionTree - const Conditionals &conditionals(); + const Conditionals &conditionals() const; /** * @brief Compute error of the GaussianMixture as a tree. 
From 364417e4aaf90a3727bbf9def41402581ed1c4e9 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 29 Dec 2022 13:12:12 -0500 Subject: [PATCH 34/41] Fixed equals and print --- gtsam/hybrid/GaussianMixtureFactor.cpp | 42 ++++++++++++++++++-------- 1 file changed, 29 insertions(+), 13 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 881a97a1b..32ca1432c 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -35,7 +35,19 @@ GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { const This *e = dynamic_cast(&lf); - return e != nullptr && Base::equals(*e, tol); + if (e == nullptr) return false; + + // This will return false if either factors_ is empty or e->factors_ is empty, + // but not if both are empty or both are not empty: + if (factors_.empty() ^ e->factors_.empty()) return false; + + // Check the base and the factors: + return Base::equals(*e, tol) && + factors_.equals(e->factors_, + [tol](const GaussianFactor::shared_ptr &f1, + const GaussianFactor::shared_ptr &f2) { + return f1->equals(*f2, tol); + }); } /* *******************************************************************************/ @@ -43,18 +55,22 @@ void GaussianMixtureFactor::print(const std::string &s, const KeyFormatter &formatter) const { HybridFactor::print(s, formatter); std::cout << "{\n"; - factors_.print( - "", [&](Key k) { return formatter(k); }, - [&](const GaussianFactor::shared_ptr &gf) -> std::string { - RedirectCout rd; - std::cout << ":\n"; - if (gf && !gf->empty()) { - gf->print("", formatter); - return rd.str(); - } else { - return "nullptr"; - } - }); + if (factors_.empty()) { + std::cout << " empty" << std::endl; + } else { + factors_.print( + "", [&](Key k) { return formatter(k); }, + [&](const GaussianFactor::shared_ptr &gf) -> std::string { + RedirectCout rd; + std::cout << ":\n"; + if (gf && !gf->empty()) { + gf->print("", formatter); + return rd.str(); + } else { + return "nullptr"; + } + }); + } std::cout << "}" << std::endl; } From 611f61c7f4189c668326073f66185e47cd9b2fc3 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 29 Dec 2022 13:21:20 -0500 Subject: [PATCH 35/41] proto code for likelihood --- gtsam/hybrid/tests/testGaussianMixture.cpp | 85 ++++++++++++++++++---- 1 file changed, 70 insertions(+), 15 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index fe6a57dee..5542d86a9 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -135,19 +135,12 @@ TEST(GaussianMixture, Error) { } /* ************************************************************************* */ -// Create a likelihood factor for a Gaussian mixture, return a Mixture factor on -// the parents. -GaussianMixtureFactor::shared_ptr likelihood(const HybridValues& values) { - GaussianMixtureFactor::shared_ptr factor; - return factor; -} - -/// Check that likelihood returns a mixture factor on the parents. -TEST(GaussianMixture, Likelihood) { - // Create mode key: 0 is low-noise, 1 is high-noise. - Key modeKey = M(0); - DiscreteKey mode(modeKey, 2); +// Create mode key: 0 is low-noise, 1 is high-noise. 
+static const Key modeKey = M(0); +static const DiscreteKey mode(modeKey, 2); +// Create a simple GaussianMixture +static GaussianMixture createSimpleGaussianMixture() { // Create Gaussian mixture Z(0) = X(0) + noise. // TODO(dellaert): making copies below is not ideal ! Matrix1 I = Matrix1::Identity(); @@ -157,15 +150,77 @@ TEST(GaussianMixture, Likelihood) { GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3)); const auto gm = GaussianMixture::FromConditionals( {Z(0)}, {X(0)}, {mode}, {conditional0, conditional1}); + return gm; +} + +/* ************************************************************************* */ +std::set DiscreteKeysAsSet(const DiscreteKeys& dkeys) { + std::set s; + s.insert(dkeys.begin(), dkeys.end()); + return s; +} + +// Get only the continuous parent keys as a KeyVector: +KeyVector continuousParents(const GaussianMixture& gm) { + // Get all parent keys: + const auto range = gm.parents(); + KeyVector continuousParentKeys(range.begin(), range.end()); + // Loop over all discrete keys: + for (const auto& discreteKey : gm.discreteKeys()) { + const Key key = discreteKey.first; + // remove that key from continuousParentKeys: + continuousParentKeys.erase(std::remove(continuousParentKeys.begin(), + continuousParentKeys.end(), key), + continuousParentKeys.end()); + } + return continuousParentKeys; +} + +// Create a test for continuousParents. +TEST(GaussianMixture, ContinuousParents) { + const GaussianMixture gm = createSimpleGaussianMixture(); + const KeyVector continuousParentKeys = continuousParents(gm); + // Check that the continuous parent keys are correct: + EXPECT(continuousParentKeys.size() == 1); + EXPECT(continuousParentKeys[0] == X(0)); +} + +/* ************************************************************************* */ +// Create a likelihood factor for a Gaussian mixture, return a Mixture factor. +GaussianMixtureFactor::shared_ptr likelihood(const GaussianMixture& gm, + const VectorValues& frontals) { + // TODO(dellaert): check that values has all frontals + const DiscreteKeys discreteParentKeys = gm.discreteKeys(); + const KeyVector continuousParentKeys = continuousParents(gm); + const GaussianMixtureFactor::Factors likelihoods( + gm.conditionals(), + [&](const GaussianConditional::shared_ptr& conditional) { + return conditional->likelihood(frontals); + }); + return boost::make_shared( + continuousParentKeys, discreteParentKeys, likelihoods); +} + +/// Check that likelihood returns a mixture factor on the parents. +TEST(GaussianMixture, Likelihood) { + const GaussianMixture gm = createSimpleGaussianMixture(); // Call the likelihood function: VectorValues measurements; measurements.insert(Z(0), Vector1(0)); - HybridValues values(DiscreteValues(), measurements); - const auto factor = likelihood(values); + const auto factor = likelihood(gm, measurements); // Check that the factor is a mixture factor on the parents. 
- const GaussianMixtureFactor expected = GaussianMixtureFactor(); + // Loop over all discrete assignments over the discrete parents: + const DiscreteKeys discreteParentKeys = gm.discreteKeys(); + + // Apply the likelihood function to all conditionals: + const GaussianMixtureFactor::Factors factors( + gm.conditionals(), + [measurements](const GaussianConditional::shared_ptr& conditional) { + return conditional->likelihood(measurements); + }); + const GaussianMixtureFactor expected({X(0)}, {mode}, factors); EXPECT(assert_equal(*factor, expected)); } From 7ba53925253ca877ce81d385075e7c1c63b501f3 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 29 Dec 2022 13:28:20 -0500 Subject: [PATCH 36/41] likelihood method (as well as continuousParents) --- gtsam/hybrid/GaussianMixture.cpp | 31 ++++++++++++++++ gtsam/hybrid/GaussianMixture.h | 29 +++++++++------ gtsam/hybrid/tests/testGaussianMixture.cpp | 41 ++-------------------- 3 files changed, 52 insertions(+), 49 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 82d16226a..a5d06f04d 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -21,6 +21,7 @@ #include #include #include +#include #include #include @@ -128,6 +129,36 @@ void GaussianMixture::print(const std::string &s, }); } +/* ************************************************************************* */ +KeyVector GaussianMixture::continuousParents() const { + // Get all parent keys: + const auto range = parents(); + KeyVector continuousParentKeys(range.begin(), range.end()); + // Loop over all discrete keys: + for (const auto &discreteKey : discreteKeys()) { + const Key key = discreteKey.first; + // remove that key from continuousParentKeys: + continuousParentKeys.erase(std::remove(continuousParentKeys.begin(), + continuousParentKeys.end(), key), + continuousParentKeys.end()); + } + return continuousParentKeys; +} + +/* ************************************************************************* */ +boost::shared_ptr GaussianMixture::likelihood( + const VectorValues &frontals) const { + // TODO(dellaert): check that values has all frontals + const DiscreteKeys discreteParentKeys = discreteKeys(); + const KeyVector continuousParentKeys = continuousParents(); + const GaussianMixtureFactor::Factors likelihoods( + conditionals(), [&](const GaussianConditional::shared_ptr &conditional) { + return conditional->likelihood(frontals); + }); + return boost::make_shared( + continuousParentKeys, discreteParentKeys, likelihoods); +} + /* ************************************************************************* */ std::set DiscreteKeysAsSet(const DiscreteKeys &dkeys) { std::set s; diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index a3393dbb0..672a886ad 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -29,6 +29,8 @@ namespace gtsam { +class GaussianMixtureFactor; + /** * @brief A conditional of gaussian mixtures indexed by discrete variables, as * part of a Bayes Network. 
This is the result of the elimination of a @@ -117,16 +119,6 @@ class GTSAM_EXPORT GaussianMixture const DiscreteKeys &discreteParents, const std::vector &conditionals); - /// @} - /// @name Standard API - /// @{ - - GaussianConditional::shared_ptr operator()( - const DiscreteValues &discreteValues) const; - - /// Returns the total number of continuous components - size_t nrComponents() const; - /// @} /// @name Testable /// @{ @@ -140,6 +132,22 @@ class GTSAM_EXPORT GaussianMixture const KeyFormatter &formatter = DefaultKeyFormatter) const override; /// @} + /// @name Standard API + /// @{ + + GaussianConditional::shared_ptr operator()( + const DiscreteValues &discreteValues) const; + + /// Returns the total number of continuous components + size_t nrComponents() const; + + /// Returns the continuous keys among the parents. + KeyVector continuousParents() const; + + // Create a likelihood factor for a Gaussian mixture, return a Mixture factor + // on the parents. + boost::shared_ptr likelihood( + const VectorValues &frontals) const; /// Getter for the underlying Conditionals DecisionTree const Conditionals &conditionals() const; @@ -181,6 +189,7 @@ class GTSAM_EXPORT GaussianMixture * @return Sum */ Sum add(const Sum &sum) const; + /// @} }; /// Return the DiscreteKey vector as a set. diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index 5542d86a9..ed5771770 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -154,53 +154,16 @@ static GaussianMixture createSimpleGaussianMixture() { } /* ************************************************************************* */ -std::set DiscreteKeysAsSet(const DiscreteKeys& dkeys) { - std::set s; - s.insert(dkeys.begin(), dkeys.end()); - return s; -} - -// Get only the continuous parent keys as a KeyVector: -KeyVector continuousParents(const GaussianMixture& gm) { - // Get all parent keys: - const auto range = gm.parents(); - KeyVector continuousParentKeys(range.begin(), range.end()); - // Loop over all discrete keys: - for (const auto& discreteKey : gm.discreteKeys()) { - const Key key = discreteKey.first; - // remove that key from continuousParentKeys: - continuousParentKeys.erase(std::remove(continuousParentKeys.begin(), - continuousParentKeys.end(), key), - continuousParentKeys.end()); - } - return continuousParentKeys; -} - // Create a test for continuousParents. TEST(GaussianMixture, ContinuousParents) { const GaussianMixture gm = createSimpleGaussianMixture(); - const KeyVector continuousParentKeys = continuousParents(gm); + const KeyVector continuousParentKeys = gm.continuousParents(); // Check that the continuous parent keys are correct: EXPECT(continuousParentKeys.size() == 1); EXPECT(continuousParentKeys[0] == X(0)); } /* ************************************************************************* */ -// Create a likelihood factor for a Gaussian mixture, return a Mixture factor. 
-GaussianMixtureFactor::shared_ptr likelihood(const GaussianMixture& gm, - const VectorValues& frontals) { - // TODO(dellaert): check that values has all frontals - const DiscreteKeys discreteParentKeys = gm.discreteKeys(); - const KeyVector continuousParentKeys = continuousParents(gm); - const GaussianMixtureFactor::Factors likelihoods( - gm.conditionals(), - [&](const GaussianConditional::shared_ptr& conditional) { - return conditional->likelihood(frontals); - }); - return boost::make_shared( - continuousParentKeys, discreteParentKeys, likelihoods); -} - /// Check that likelihood returns a mixture factor on the parents. TEST(GaussianMixture, Likelihood) { const GaussianMixture gm = createSimpleGaussianMixture(); @@ -208,7 +171,7 @@ TEST(GaussianMixture, Likelihood) { // Call the likelihood function: VectorValues measurements; measurements.insert(Z(0), Vector1(0)); - const auto factor = likelihood(gm, measurements); + const auto factor = gm.likelihood(measurements); // Check that the factor is a mixture factor on the parents. // Loop over all discrete assignments over the discrete parents: From 69bb4db42d545103af4e75097e910deca1240281 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 29 Dec 2022 23:59:34 +0530 Subject: [PATCH 37/41] HybridValues: continuous then discrete --- gtsam/hybrid/HybridBayesNet.cpp | 4 ++-- gtsam/hybrid/HybridBayesTree.cpp | 2 +- gtsam/hybrid/HybridValues.h | 20 ++++++++++---------- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index c598b7d62..d46d4d5c7 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -218,7 +218,7 @@ HybridValues HybridBayesNet::optimize() const { // Given the MPE, compute the optimal continuous values. GaussianBayesNet gbn = choose(mpe); - return HybridValues(mpe, gbn.optimize()); + return HybridValues(gbn.optimize(), mpe); } /* ************************************************************************* */ @@ -267,7 +267,7 @@ HybridValues HybridBayesNet::sample(const HybridValues &given, GaussianBayesNet gbn = choose(assignment); // Sample from the Gaussian Bayes net. VectorValues sample = gbn.sample(given.continuous(), rng); - return {assignment, sample}; + return {sample, assignment}; } /* ************************************************************************* */ diff --git a/gtsam/hybrid/HybridBayesTree.cpp b/gtsam/hybrid/HybridBayesTree.cpp index ed70a0aa9..f7c2d3364 100644 --- a/gtsam/hybrid/HybridBayesTree.cpp +++ b/gtsam/hybrid/HybridBayesTree.cpp @@ -58,7 +58,7 @@ HybridValues HybridBayesTree::optimize() const { } VectorValues values = optimize(mpe); - return HybridValues(mpe, values); + return HybridValues(values, mpe); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/HybridValues.h b/gtsam/hybrid/HybridValues.h index 944fe17e6..7fa212018 100644 --- a/gtsam/hybrid/HybridValues.h +++ b/gtsam/hybrid/HybridValues.h @@ -37,12 +37,12 @@ namespace gtsam { */ class GTSAM_EXPORT HybridValues { private: - // DiscreteValue stored the discrete components of the HybridValues. - DiscreteValues discrete_; - // VectorValue stored the continuous components of the HybridValues. VectorValues continuous_; + // DiscreteValue stored the discrete components of the HybridValues. 
+ DiscreteValues discrete_; + public: /// @name Standard Constructors /// @{ @@ -51,8 +51,8 @@ class GTSAM_EXPORT HybridValues { HybridValues() = default; /// Construct from DiscreteValues and VectorValues. - HybridValues(const DiscreteValues& dv, const VectorValues& cv) - : discrete_(dv), continuous_(cv){}; + HybridValues(const VectorValues& cv, const DiscreteValues& dv) + : continuous_(cv), discrete_(dv){}; /// @} /// @name Testable @@ -62,15 +62,15 @@ class GTSAM_EXPORT HybridValues { void print(const std::string& s = "HybridValues", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { std::cout << s << ": \n"; - discrete_.print(" Discrete", keyFormatter); // print discrete components continuous_.print(" Continuous", - keyFormatter); // print continuous components + keyFormatter); // print continuous components + discrete_.print(" Discrete", keyFormatter); // print discrete components }; /// equals required by Testable for unit testing bool equals(const HybridValues& other, double tol = 1e-9) const { - return discrete_.equals(other.discrete_, tol) && - continuous_.equals(other.continuous_, tol); + return continuous_.equals(other.continuous_, tol) && + discrete_.equals(other.discrete_, tol); } /// @} @@ -130,8 +130,8 @@ class GTSAM_EXPORT HybridValues { std::string html( const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { std::stringstream ss; - ss << this->discrete_.html(keyFormatter); ss << this->continuous_.html(keyFormatter); + ss << this->discrete_.html(keyFormatter); return ss.str(); }; From d4ee6997f7e032b9720c554ecd2b6fe6ab96b545 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 29 Dec 2022 13:54:44 -0500 Subject: [PATCH 38/41] Remove FromConditionals --- gtsam/hybrid/GaussianMixture.cpp | 11 ++++------ gtsam/hybrid/GaussianMixture.h | 2 +- gtsam/hybrid/hybrid.i | 25 +++++++++++++--------- gtsam/hybrid/tests/testGaussianMixture.cpp | 4 +--- gtsam/hybrid/tests/testHybridBayesNet.cpp | 4 ++-- python/gtsam/tests/test_HybridBayesNet.py | 5 ++--- 6 files changed, 25 insertions(+), 26 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index a5d06f04d..e34467527 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -42,15 +42,12 @@ const GaussianMixture::Conditionals &GaussianMixture::conditionals() const { } /* *******************************************************************************/ -GaussianMixture GaussianMixture::FromConditionals( +GaussianMixture::GaussianMixture( const KeyVector &continuousFrontals, const KeyVector &continuousParents, const DiscreteKeys &discreteParents, - const std::vector &conditionalsList) { - Conditionals dt(discreteParents, conditionalsList); - - return GaussianMixture(continuousFrontals, continuousParents, discreteParents, - dt); -} + const std::vector &conditionalsList) + : GaussianMixture(continuousFrontals, continuousParents, discreteParents, + Conditionals(discreteParents, conditionalsList)) {} /* *******************************************************************************/ GaussianMixture::Sum GaussianMixture::add( diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index 672a886ad..2cdc23b46 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -114,7 +114,7 @@ class GTSAM_EXPORT GaussianMixture * @param discreteParents Discrete parents variables * @param conditionals List of conditionals */ - static This FromConditionals( + GaussianMixture( const KeyVector &continuousFrontals, 
const KeyVector &continuousParents, const DiscreteKeys &discreteParents, const std::vector &conditionals); diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 2721612f9..29247cdc3 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -66,12 +66,13 @@ class GaussianMixtureFactor : gtsam::HybridFactor { #include class GaussianMixture : gtsam::HybridFactor { - static GaussianMixture FromConditionals( - const gtsam::KeyVector& continuousFrontals, - const gtsam::KeyVector& continuousParents, - const gtsam::DiscreteKeys& discreteParents, - const std::vector& - conditionalsList); + GaussianMixture(const gtsam::KeyVector& continuousFrontals, + const gtsam::KeyVector& continuousParents, + const gtsam::DiscreteKeys& discreteParents, + const std::vector& + conditionalsList); + + gtsam::GaussianMixtureFactor* likelihood(const gtsam::VectorValues &frontals) const; void print(string s = "GaussianMixture\n", const gtsam::KeyFormatter& keyFormatter = @@ -104,7 +105,7 @@ class HybridBayesTree { gtsam::DefaultKeyFormatter) const; }; -#include +#include class HybridBayesNet { HybridBayesNet(); void add(const gtsam::HybridConditional& s); @@ -113,6 +114,11 @@ class HybridBayesNet { void addDiscrete(const gtsam::DiscreteConditional* s); void emplaceMixture(const gtsam::GaussianMixture& s); + void emplaceMixture(const gtsam::KeyVector& continuousFrontals, + const gtsam::KeyVector& continuousParents, + const gtsam::DiscreteKeys& discreteParents, + const std::vector& + conditionalsList); void emplaceGaussian(const gtsam::GaussianConditional& s); void emplaceDiscrete(const gtsam::DiscreteConditional& s); void emplaceDiscrete(const gtsam::DiscreteKey& key, string spec); @@ -162,9 +168,8 @@ class HybridGaussianFactorGraph { void push_back(const gtsam::HybridBayesNet& bayesNet); void push_back(const gtsam::HybridBayesTree& bayesTree); void push_back(const gtsam::GaussianMixtureFactor* gmm); - - void add(gtsam::DecisionTreeFactor* factor); - void add(gtsam::JacobianFactor* factor); + void push_back(gtsam::DecisionTreeFactor* factor); + void push_back(gtsam::JacobianFactor* factor); bool empty() const; void remove(size_t i); diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index ed5771770..242c9ba41 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -148,9 +148,7 @@ static GaussianMixture createSimpleGaussianMixture() { GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 0.5)); const auto conditional1 = boost::make_shared( GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3)); - const auto gm = GaussianMixture::FromConditionals( - {Z(0)}, {X(0)}, {mode}, {conditional0, conditional1}); - return gm; + return GaussianMixture({Z(0)}, {X(0)}, {mode}, {conditional0, conditional1}); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 9a2ca1208..627d82694 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -88,8 +88,8 @@ TEST(HybridBayesNet, evaluateHybrid) { // Create hybrid Bayes net. 
HybridBayesNet bayesNet; bayesNet.emplaceGaussian(continuousConditional); - bayesNet.emplaceMixture(GaussianMixture::FromConditionals( - {X(1)}, {}, {Asia}, {conditional0, conditional1})); + GaussianMixture gm({X(1)}, {}, {Asia}, {conditional0, conditional1}); + bayesNet.emplaceMixture(gm); // copy :-( bayesNet.emplaceDiscrete(Asia, "99/1"); // Create values at which to evaluate. diff --git a/python/gtsam/tests/test_HybridBayesNet.py b/python/gtsam/tests/test_HybridBayesNet.py index 66cddf05e..af89a4ba7 100644 --- a/python/gtsam/tests/test_HybridBayesNet.py +++ b/python/gtsam/tests/test_HybridBayesNet.py @@ -42,14 +42,13 @@ class TestHybridBayesNet(GtsamTestCase): conditional1 = GaussianConditional(X(1), [2], I_1x1, model1) dkeys = DiscreteKeys() dkeys.push_back(Asia) - gm = GaussianMixture.FromConditionals([X(1)], [], dkeys, - [conditional0, conditional1]) # + gm = GaussianMixture([X(1)], [], dkeys, [conditional0, conditional1]) # Create hybrid Bayes net. bayesNet = HybridBayesNet() bayesNet.addGaussian(gc) bayesNet.addMixture(gm) - bayesNet.addDiscrete(Asia, "99/1") + bayesNet.emplaceDiscrete(Asia, "99/1") # Create values at which to evaluate. values = HybridValues() From 2d688a1986de6f48e2ac0a0fa8a9e3bcddeae2e0 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 29 Dec 2022 13:55:06 -0500 Subject: [PATCH 39/41] Added tests to convert Hybrid BN to corresponding "likelihood" FG --- python/gtsam/tests/test_HybridFactorGraph.py | 135 +++++++++++++++---- 1 file changed, 107 insertions(+), 28 deletions(-) diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index 37bb5b93c..37243b937 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -13,73 +13,152 @@ Author: Fan Jiang import unittest import numpy as np -from gtsam.symbol_shorthand import C, X +from gtsam.symbol_shorthand import C, M, X, Z from gtsam.utils.test_case import GtsamTestCase import gtsam +from gtsam import ( + DecisionTreeFactor, + DiscreteConditional, + DiscreteKeys, + GaussianConditional, + GaussianMixture, + GaussianMixtureFactor, + HybridGaussianFactorGraph, + JacobianFactor, + Ordering, + noiseModel, +) class TestHybridGaussianFactorGraph(GtsamTestCase): """Unit tests for HybridGaussianFactorGraph.""" + def test_create(self): """Test construction of hybrid factor graph.""" - noiseModel = gtsam.noiseModel.Unit.Create(3) - dk = gtsam.DiscreteKeys() + model = noiseModel.Unit.Create(3) + dk = DiscreteKeys() dk.push_back((C(0), 2)) - jf1 = gtsam.JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), - noiseModel) - jf2 = gtsam.JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), - noiseModel) + jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model) + jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model) - gmf = gtsam.GaussianMixtureFactor.FromFactors([X(0)], dk, [jf1, jf2]) + gmf = GaussianMixtureFactor([X(0)], dk, [jf1, jf2]) - hfg = gtsam.HybridGaussianFactorGraph() - hfg.add(jf1) - hfg.add(jf2) + hfg = HybridGaussianFactorGraph() + hfg.push_back(jf1) + hfg.push_back(jf2) hfg.push_back(gmf) hbn = hfg.eliminateSequential( - gtsam.Ordering.ColamdConstrainedLastHybridGaussianFactorGraph( - hfg, [C(0)])) + Ordering.ColamdConstrainedLastHybridGaussianFactorGraph(hfg, [C(0)]) + ) self.assertEqual(hbn.size(), 2) mixture = hbn.at(0).inner() - self.assertIsInstance(mixture, gtsam.GaussianMixture) + self.assertIsInstance(mixture, GaussianMixture) self.assertEqual(len(mixture.keys()), 2) discrete_conditional = 
hbn.at(hbn.size() - 1).inner() - self.assertIsInstance(discrete_conditional, gtsam.DiscreteConditional) + self.assertIsInstance(discrete_conditional, DiscreteConditional) def test_optimize(self): """Test construction of hybrid factor graph.""" - noiseModel = gtsam.noiseModel.Unit.Create(3) - dk = gtsam.DiscreteKeys() + model = noiseModel.Unit.Create(3) + dk = DiscreteKeys() dk.push_back((C(0), 2)) - jf1 = gtsam.JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), - noiseModel) - jf2 = gtsam.JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), - noiseModel) + jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model) + jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model) - gmf = gtsam.GaussianMixtureFactor.FromFactors([X(0)], dk, [jf1, jf2]) + gmf = GaussianMixtureFactor([X(0)], dk, [jf1, jf2]) - hfg = gtsam.HybridGaussianFactorGraph() - hfg.add(jf1) - hfg.add(jf2) + hfg = HybridGaussianFactorGraph() + hfg.push_back(jf1) + hfg.push_back(jf2) hfg.push_back(gmf) dtf = gtsam.DecisionTreeFactor([(C(0), 2)], "0 1") - hfg.add(dtf) + hfg.push_back(dtf) hbn = hfg.eliminateSequential( - gtsam.Ordering.ColamdConstrainedLastHybridGaussianFactorGraph( - hfg, [C(0)])) + Ordering.ColamdConstrainedLastHybridGaussianFactorGraph(hfg, [C(0)]) + ) hv = hbn.optimize() self.assertEqual(hv.atDiscrete(C(0)), 1) + @staticmethod + def tiny(num_measurements: int = 1): + """Create a tiny two variable hybrid model.""" + # Create hybrid Bayes net. + bayesNet = gtsam.HybridBayesNet() + + # Create mode key: 0 is low-noise, 1 is high-noise. + modeKey = M(0) + mode = (modeKey, 2) + + # Create Gaussian mixture Z(0) = X(0) + noise for each measurement. + I = np.eye(1) + keys = DiscreteKeys() + keys.push_back(mode) + for i in range(num_measurements): + conditional0 = GaussianConditional.FromMeanAndStddev( + Z(i), I, X(0), [0], sigma=0.5 + ) + conditional1 = GaussianConditional.FromMeanAndStddev( + Z(i), I, X(0), [0], sigma=3 + ) + bayesNet.emplaceMixture([Z(i)], [X(0)], keys, [conditional0, conditional1]) + + # Create prior on X(0). + prior_on_x0 = GaussianConditional.FromMeanAndStddev(X(0), [5.0], 5.0) + bayesNet.addGaussian(prior_on_x0) + + # Add prior on mode. + bayesNet.emplaceDiscrete(mode, "1/1") + + return bayesNet + + def test_tiny(self): + """Test a tiny two variable hybrid model.""" + bayesNet = self.tiny() + sample = bayesNet.sample() + # print(sample) + + # Create a factor graph from the Bayes net with sampled measurements. + fg = HybridGaussianFactorGraph() + conditional = bayesNet.atMixture(0) + measurement = gtsam.VectorValues() + measurement.insert(Z(0), sample.at(Z(0))) + factor = conditional.likelihood(measurement) + fg.push_back(factor) + fg.push_back(bayesNet.atGaussian(1)) + fg.push_back(bayesNet.atDiscrete(2)) + + self.assertEqual(fg.size(), 3) + + def test_tiny2(self): + """Test a tiny two variable hybrid model, with 2 measurements.""" + # Create the Bayes net and sample from it. + bayesNet = self.tiny(num_measurements=2) + sample = bayesNet.sample() + # print(sample) + + # Create a factor graph from the Bayes net with sampled measurements. 
+ fg = HybridGaussianFactorGraph() + for i in range(2): + conditional = bayesNet.atMixture(i) + measurement = gtsam.VectorValues() + measurement.insert(Z(i), sample.at(Z(i))) + factor = conditional.likelihood(measurement) + fg.push_back(factor) + fg.push_back(bayesNet.atGaussian(2)) + fg.push_back(bayesNet.atDiscrete(3)) + + self.assertEqual(fg.size(), 4) + if __name__ == "__main__": unittest.main() From a4659f01c75d810c1a360dfaa58c77d26731dbce Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 29 Dec 2022 14:13:35 -0500 Subject: [PATCH 40/41] Add error and probPrime variants --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 43 +++++++++++++++++++- gtsam/hybrid/HybridGaussianFactorGraph.h | 25 ++++++++++++ gtsam/hybrid/hybrid.i | 6 +++ python/gtsam/tests/test_HybridFactorGraph.py | 19 +++++++-- 4 files changed, 88 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 32653bdec..b110f8586 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -468,12 +468,51 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::error( return error_tree; } +/* ************************************************************************ */ +double HybridGaussianFactorGraph::error( + const VectorValues &continuousValues, + const DiscreteValues &discreteValues) const { + double error = 0.0; + for (size_t idx = 0; idx < size(); idx++) { + auto factor = factors_.at(idx); + + if (factor->isHybrid()) { + if (auto c = boost::dynamic_pointer_cast(factor)) { + error += c->asMixture()->error(continuousValues, discreteValues); + } + if (auto f = boost::dynamic_pointer_cast(factor)) { + error += f->error(continuousValues, discreteValues); + } + + } else if (factor->isContinuous()) { + if (auto f = boost::dynamic_pointer_cast(factor)) { + error += f->inner()->error(continuousValues); + } + if (auto cg = boost::dynamic_pointer_cast(factor)) { + error += cg->asGaussian()->error(continuousValues); + } + } + } + return error; +} + +/* ************************************************************************ */ +double HybridGaussianFactorGraph::probPrime( + const VectorValues &continuousValues, + const DiscreteValues &discreteValues) const { + double error = this->error(continuousValues, discreteValues); + // NOTE: The 0.5 term is handled by each factor + return std::exp(-error); +} + /* ************************************************************************ */ AlgebraicDecisionTree HybridGaussianFactorGraph::probPrime( const VectorValues &continuousValues) const { AlgebraicDecisionTree error_tree = this->error(continuousValues); - AlgebraicDecisionTree prob_tree = - error_tree.apply([](double error) { return exp(-error); }); + AlgebraicDecisionTree prob_tree = error_tree.apply([](double error) { + // NOTE: The 0.5 term is handled by each factor + return exp(-error); + }); return prob_tree; } diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index ac9ae1a46..9de18b6af 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -182,6 +182,19 @@ class GTSAM_EXPORT HybridGaussianFactorGraph */ AlgebraicDecisionTree error(const VectorValues& continuousValues) const; + /** + * @brief Compute error given a continuous vector values + * and a discrete assignment. + * + * @param continuousValues The continuous VectorValues + * for computing the error. 
+ * @param discreteValues The specific discrete assignment + * whose error we wish to compute. + * @return double + */ + double error(const VectorValues& continuousValues, + const DiscreteValues& discreteValues) const; + /** * @brief Compute unnormalized probability \f$ P(X | M, Z) \f$ * for each discrete assignment, and return as a tree. @@ -193,6 +206,18 @@ class GTSAM_EXPORT HybridGaussianFactorGraph AlgebraicDecisionTree probPrime( const VectorValues& continuousValues) const; + /** + * @brief Compute the unnormalized posterior probability for a continuous + * vector values given a specific assignment. + * + * @param continuousValues The vector values for which to compute the + * posterior probability. + * @param discreteValues The specific assignment to use for the computation. + * @return double + */ + double probPrime(const VectorValues& continuousValues, + const DiscreteValues& discreteValues) const; + /** * @brief Return a Colamd constrained ordering where the discrete keys are * eliminated after the continuous keys. diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 29247cdc3..3dbf5d542 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -180,6 +180,12 @@ class HybridGaussianFactorGraph { void print(string s = "") const; bool equals(const gtsam::HybridGaussianFactorGraph& fg, double tol = 1e-9) const; + // evaluation + double error(const gtsam::VectorValues& continuousValues, + const gtsam::DiscreteValues& discreteValues) const; + double probPrime(const gtsam::VectorValues& continuousValues, + const gtsam::DiscreteValues& discreteValues) const; + gtsam::HybridBayesNet* eliminateSequential(); gtsam::HybridBayesNet* eliminateSequential( gtsam::Ordering::OrderingType type); diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index 37243b937..2ebc87971 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -11,6 +11,7 @@ Author: Fan Jiang # pylint: disable=invalid-name, no-name-in-module, no-member import unittest +import math import numpy as np from gtsam.symbol_shorthand import C, M, X, Z @@ -110,7 +111,9 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): conditional1 = GaussianConditional.FromMeanAndStddev( Z(i), I, X(0), [0], sigma=3 ) - bayesNet.emplaceMixture([Z(i)], [X(0)], keys, [conditional0, conditional1]) + bayesNet.emplaceMixture( + [Z(i)], [X(0)], keys, [conditional0, conditional1] + ) # Create prior on X(0). prior_on_x0 = GaussianConditional.FromMeanAndStddev(X(0), [5.0], 5.0) @@ -136,7 +139,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): fg.push_back(factor) fg.push_back(bayesNet.atGaussian(1)) fg.push_back(bayesNet.atDiscrete(2)) - + self.assertEqual(fg.size(), 3) def test_tiny2(self): @@ -156,8 +159,18 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): fg.push_back(factor) fg.push_back(bayesNet.atGaussian(2)) fg.push_back(bayesNet.atDiscrete(3)) - + self.assertEqual(fg.size(), 4) + # Calculate ratio between Bayes net probability and the factor graph: + continuousValues = gtsam.VectorValues() + continuousValues.insert(X(0), sample.at(X(0))) + discreteValues = sample.discrete() + expected_ratio = bayesNet.evaluate(sample) / fg.probPrime( + continuousValues, discreteValues + ) + print(expected_ratio) + + # TODO(dellaert): Change the mode to 0 and calculate the ratio again. 
if __name__ == "__main__": From a34d09d094ef7d4343ce5d6169e9728536c14ef4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 30 Dec 2022 00:45:09 +0530 Subject: [PATCH 41/41] fix wrapper --- gtsam/hybrid/HybridValues.h | 2 +- gtsam/hybrid/hybrid.i | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridValues.h b/gtsam/hybrid/HybridValues.h index 7fa212018..80c942a83 100644 --- a/gtsam/hybrid/HybridValues.h +++ b/gtsam/hybrid/HybridValues.h @@ -96,7 +96,7 @@ class GTSAM_EXPORT HybridValues { * the key \c j is already used. * @param value The vector to be inserted. * @param j The index with which the value will be associated. */ - void insert(Key j, int value) { discrete_[j] = value; }; + void insert(Key j, size_t value) { discrete_[j] = value; }; /** Insert a vector \c value with key \c j. Throws an invalid_argument * exception if the key \c j is already used. diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index acda94638..c98c07f65 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -6,10 +6,11 @@ namespace gtsam { #include class HybridValues { - gtsam::DiscreteValues discrete() const; gtsam::VectorValues continuous() const; + gtsam::DiscreteValues discrete() const; + HybridValues(); - HybridValues(const gtsam::DiscreteValues &dv, const gtsam::VectorValues &cv); + HybridValues(const gtsam::VectorValues &cv, const gtsam::DiscreteValues &dv); void print(string s = "HybridValues", const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const;
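
For reference, the pieces introduced in the patches above compose as follows. The sketch below is illustrative only and is not part of the patch series: it assumes a GTSAM build that already contains patches 36-41, i.e. the direct GaussianMixture constructor that replaced FromConditionals, the continuousParents() and likelihood() methods, the continuous-then-discrete HybridValues ordering, and the pointwise error()/probPrime() overloads on HybridGaussianFactorGraph. The toy values, the main() scaffold, and the exact header list are assumptions that mirror the C++ and Python tests shown in the diffs rather than any shipped example.

// Hedged usage sketch, assuming the patched GTSAM hybrid API described above.
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianConditional.h>

#include <boost/make_shared.hpp>
#include <iostream>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;
using symbol_shorthand::Z;

int main() {
  // Same two-component model as in testGaussianMixture.cpp:
  // Z(0) = X(0) + noise, with mode M(0) selecting sigma 0.5 or 3.
  const DiscreteKey mode(M(0), 2);
  const Matrix1 I = Matrix1::Identity();
  const auto conditional0 = boost::make_shared<GaussianConditional>(
      GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 0.5));
  const auto conditional1 = boost::make_shared<GaussianConditional>(
      GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3.0));

  // Direct constructor (the FromConditionals named constructor was removed).
  const GaussianMixture gm({Z(0)}, {X(0)}, {mode}, {conditional0, conditional1});

  // continuousParents() drops the discrete keys, leaving only X(0).
  const KeyVector parents = gm.continuousParents();
  std::cout << "continuous parents: " << parents.size() << std::endl;

  // Condition on a measurement z0 = 0 to obtain a GaussianMixtureFactor
  // on the parents X(0) and M(0).
  VectorValues measurement;
  measurement.insert(Z(0), Vector1(0.0));
  const boost::shared_ptr<GaussianMixtureFactor> factor = gm.likelihood(measurement);

  // Put the factor in a hybrid factor graph and evaluate it pointwise.
  HybridGaussianFactorGraph fg;
  fg.push_back(factor);

  VectorValues continuousValues;
  continuousValues.insert(X(0), Vector1(1.0));
  DiscreteValues discreteValues;
  discreteValues[M(0)] = 0;  // pick the low-noise mode

  // Overloads added in PATCH 40: error and unnormalized probability for one
  // specific continuous + discrete assignment.
  const double error = fg.error(continuousValues, discreteValues);
  const double p = fg.probPrime(continuousValues, discreteValues);
  std::cout << "error = " << error << ", probPrime = " << p << std::endl;

  // HybridValues now takes continuous values first, then discrete (PATCH 37).
  const HybridValues hybridValues(continuousValues, discreteValues);
  hybridValues.print();

  return 0;
}

Note that probPrime is simply exp(-error); the 1/2 factors are already folded into each factor's error term, matching the comment added in HybridGaussianFactorGraph.cpp above.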