From 843f53c6cc84fb206f95e74e862529990349b7a0 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 14 Oct 2022 16:50:19 -0400 Subject: [PATCH 01/29] move PreintegrationCombinedParams to its own file --- gtsam/navigation/CombinedImuFactor.h | 66 +---------- .../navigation/PreintegrationCombinedParams.h | 104 ++++++++++++++++++ 2 files changed, 108 insertions(+), 62 deletions(-) create mode 100644 gtsam/navigation/PreintegrationCombinedParams.h diff --git a/gtsam/navigation/CombinedImuFactor.h b/gtsam/navigation/CombinedImuFactor.h index 69d72ad9b..05e4b481e 100644 --- a/gtsam/navigation/CombinedImuFactor.h +++ b/gtsam/navigation/CombinedImuFactor.h @@ -23,11 +23,12 @@ #pragma once /* GTSAM includes */ -#include -#include -#include #include #include +#include +#include +#include +#include namespace gtsam { @@ -57,65 +58,6 @@ typedef ManifoldPreintegration PreintegrationType; * Robotics: Science and Systems (RSS), 2015. */ -/// Parameters for pre-integration using PreintegratedCombinedMeasurements: -/// Usage: Create just a single Params and pass a shared pointer to the constructor -struct GTSAM_EXPORT PreintegrationCombinedParams : PreintegrationParams { - Matrix3 biasAccCovariance; ///< continuous-time "Covariance" describing accelerometer bias random walk - Matrix3 biasOmegaCovariance; ///< continuous-time "Covariance" describing gyroscope bias random walk - Matrix6 biasAccOmegaInt; ///< covariance of bias used as initial estimate. - - /// Default constructor makes uninitialized params struct. - /// Used for serialization. - PreintegrationCombinedParams() - : biasAccCovariance(I_3x3), - biasOmegaCovariance(I_3x3), - biasAccOmegaInt(I_6x6) {} - - /// See two named constructors below for good values of n_gravity in body frame - PreintegrationCombinedParams(const Vector3& n_gravity) : - PreintegrationParams(n_gravity), biasAccCovariance(I_3x3), - biasOmegaCovariance(I_3x3), biasAccOmegaInt(I_6x6) { - - } - - // Default Params for a Z-down navigation frame, such as NED: gravity points along positive Z-axis - static boost::shared_ptr MakeSharedD(double g = 9.81) { - return boost::shared_ptr(new PreintegrationCombinedParams(Vector3(0, 0, g))); - } - - // Default Params for a Z-up navigation frame, such as ENU: gravity points along negative Z-axis - static boost::shared_ptr MakeSharedU(double g = 9.81) { - return boost::shared_ptr(new PreintegrationCombinedParams(Vector3(0, 0, -g))); - } - - void print(const std::string& s="") const override; - bool equals(const PreintegratedRotationParams& other, double tol) const override; - - void setBiasAccCovariance(const Matrix3& cov) { biasAccCovariance=cov; } - void setBiasOmegaCovariance(const Matrix3& cov) { biasOmegaCovariance=cov; } - void setBiasAccOmegaInit(const Matrix6& cov) { biasAccOmegaInt=cov; } - - const Matrix3& getBiasAccCovariance() const { return biasAccCovariance; } - const Matrix3& getBiasOmegaCovariance() const { return biasOmegaCovariance; } - const Matrix6& getBiasAccOmegaInit() const { return biasAccOmegaInt; } - -private: - - /** Serialization function */ - friend class boost::serialization::access; - template - void serialize(ARCHIVE& ar, const unsigned int /*version*/) { - namespace bs = ::boost::serialization; - ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(PreintegrationParams); - ar & BOOST_SERIALIZATION_NVP(biasAccCovariance); - ar & BOOST_SERIALIZATION_NVP(biasOmegaCovariance); - ar & BOOST_SERIALIZATION_NVP(biasAccOmegaInt); - } - -public: - GTSAM_MAKE_ALIGNED_OPERATOR_NEW -}; - /** * PreintegratedCombinedMeasurements 
integrates the IMU measurements * (rotation rates and accelerations) and the corresponding covariance matrix. diff --git a/gtsam/navigation/PreintegrationCombinedParams.h b/gtsam/navigation/PreintegrationCombinedParams.h new file mode 100644 index 000000000..2c99463bc --- /dev/null +++ b/gtsam/navigation/PreintegrationCombinedParams.h @@ -0,0 +1,104 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file PreintegrationCombinedParams.h + * @author Luca Carlone + * @author Stephen Williams + * @author Richard Roberts + * @author Vadim Indelman + * @author David Jensen + * @author Frank Dellaert + * @author Varun Agrawal + **/ + +#pragma once + +/* GTSAM includes */ +#include +#include +#include +#include +#include +#include + +namespace gtsam { + +/// Parameters for pre-integration using PreintegratedCombinedMeasurements: +/// Usage: Create just a single Params and pass a shared pointer to the +/// constructor +struct GTSAM_EXPORT PreintegrationCombinedParams : PreintegrationParams { + Matrix3 biasAccCovariance; ///< continuous-time "Covariance" describing + ///< accelerometer bias random walk + Matrix3 biasOmegaCovariance; ///< continuous-time "Covariance" describing + ///< gyroscope bias random walk + Matrix6 biasAccOmegaInt; ///< covariance of bias used as initial estimate. + + /// Default constructor makes uninitialized params struct. + /// Used for serialization. + PreintegrationCombinedParams() + : biasAccCovariance(I_3x3), + biasOmegaCovariance(I_3x3), + biasAccOmegaInt(I_6x6) {} + + /// See two named constructors below for good values of n_gravity in body + /// frame + PreintegrationCombinedParams(const Vector3& n_gravity) + : PreintegrationParams(n_gravity), + biasAccCovariance(I_3x3), + biasOmegaCovariance(I_3x3), + biasAccOmegaInt(I_6x6) {} + + // Default Params for a Z-down navigation frame, such as NED: gravity points + // along positive Z-axis + static boost::shared_ptr MakeSharedD( + double g = 9.81) { + return boost::shared_ptr( + new PreintegrationCombinedParams(Vector3(0, 0, g))); + } + + // Default Params for a Z-up navigation frame, such as ENU: gravity points + // along negative Z-axis + static boost::shared_ptr MakeSharedU( + double g = 9.81) { + return boost::shared_ptr( + new PreintegrationCombinedParams(Vector3(0, 0, -g))); + } + + void print(const std::string& s = "") const override; + bool equals(const PreintegratedRotationParams& other, + double tol) const override; + + void setBiasAccCovariance(const Matrix3& cov) { biasAccCovariance = cov; } + void setBiasOmegaCovariance(const Matrix3& cov) { biasOmegaCovariance = cov; } + void setBiasAccOmegaInit(const Matrix6& cov) { biasAccOmegaInt = cov; } + + const Matrix3& getBiasAccCovariance() const { return biasAccCovariance; } + const Matrix3& getBiasOmegaCovariance() const { return biasOmegaCovariance; } + const Matrix6& getBiasAccOmegaInit() const { return biasAccOmegaInt; } + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE& ar, const unsigned int /*version*/) { + namespace bs = ::boost::serialization; + ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(PreintegrationParams); + ar& 
BOOST_SERIALIZATION_NVP(biasAccCovariance); + ar& BOOST_SERIALIZATION_NVP(biasOmegaCovariance); + ar& BOOST_SERIALIZATION_NVP(biasAccOmegaInt); + } + + public: + GTSAM_MAKE_ALIGNED_OPERATOR_NEW +}; + +} // namespace gtsam From 090fb4e275179c94a37d838e0f3b6c8d119606db Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 14 Oct 2022 17:11:24 -0400 Subject: [PATCH 02/29] Add python example for CombinedImuFactor --- .../examples/CombinedImuFactorExample.py | 251 ++++++++++++++++++ 1 file changed, 251 insertions(+) create mode 100644 python/gtsam/examples/CombinedImuFactorExample.py diff --git a/python/gtsam/examples/CombinedImuFactorExample.py b/python/gtsam/examples/CombinedImuFactorExample.py new file mode 100644 index 000000000..82714dd7c --- /dev/null +++ b/python/gtsam/examples/CombinedImuFactorExample.py @@ -0,0 +1,251 @@ +""" +GTSAM Copyright 2010-2019, Georgia Tech Research Corporation, +Atlanta, Georgia 30332-0415 +All Rights Reserved + +See LICENSE for the license information + +A script validating and demonstrating the CobiendImuFactor inference. + +Author: Varun Agrawal +""" + +# pylint: disable=no-name-in-module,unused-import,arguments-differ,import-error,wrong-import-order + +from __future__ import print_function + +import argparse +import math + +import gtsam +import matplotlib.pyplot as plt +import numpy as np +from gtsam.symbol_shorthand import B, V, X +from gtsam.utils.plot import plot_pose3 +from mpl_toolkits.mplot3d import Axes3D + +from PreintegrationExample import POSES_FIG, PreintegrationExample + +GRAVITY = 9.81 + +np.set_printoptions(precision=3, suppress=True) + + +def parse_args() -> argparse.Namespace: + """Parse command line arguments.""" + parser = argparse.ArgumentParser("CombinedImuFactorExample.py") + parser.add_argument("--twist_scenario", + default="sick_twist", + choices=("zero_twist", "forward_twist", "loop_twist", + "sick_twist")) + parser.add_argument("--time", + "-T", + default=12, + type=int, + help="Total navigation time in seconds") + parser.add_argument("--compute_covariances", + default=False, + action='store_true') + parser.add_argument("--verbose", default=False, action='store_true') + return parser.parse_args() + + +class CombinedImuFactorExample(PreintegrationExample): + """Class to run example of the Imu Factor.""" + def __init__(self, twist_scenario: str = "sick_twist"): + self.velocity = np.array([2, 0, 0]) + self.priorNoise = gtsam.noiseModel.Isotropic.Sigma(6, 0.1) + self.velNoise = gtsam.noiseModel.Isotropic.Sigma(3, 0.1) + self.biasNoise = gtsam.noiseModel.Isotropic.Sigma(6, 0.001) + + # Choose one of these twists to change scenario: + twist_scenarios = dict( + zero_twist=(np.zeros(3), np.zeros(3)), + forward_twist=(np.zeros(3), self.velocity), + loop_twist=(np.array([0, -math.radians(30), 0]), self.velocity), + sick_twist=(np.array([math.radians(30), -math.radians(30), + 0]), self.velocity)) + + accBias = np.array([-0.3, 0.1, 0.2]) + gyroBias = np.array([0.1, 0.3, -0.1]) + bias = gtsam.imuBias.ConstantBias(accBias, gyroBias) + + params = gtsam.PreintegrationCombinedParams.MakeSharedU(GRAVITY) + + # Some arbitrary noise sigmas + gyro_sigma = 1e-3 + accel_sigma = 1e-3 + I_3x3 = np.eye(3) + params.setGyroscopeCovariance(gyro_sigma**2 * I_3x3) + params.setAccelerometerCovariance(accel_sigma**2 * I_3x3) + params.setIntegrationCovariance(1e-7**2 * I_3x3) + + dt = 1e-2 + super(CombinedImuFactorExample, + self).__init__(twist_scenarios[twist_scenario], bias, params, dt) + + def addPrior(self, i: int, graph: gtsam.NonlinearFactorGraph): + 
"""Add a prior on the navigation state at time `i`.""" + state = self.scenario.navState(i) + graph.push_back( + gtsam.PriorFactorPose3(X(i), state.pose(), self.priorNoise)) + graph.push_back( + gtsam.PriorFactorVector(V(i), state.velocity(), self.velNoise)) + graph.push_back( + gtsam.PriorFactorConstantBias(B(i), self.actualBias, + self.biasNoise)) + + def optimize(self, graph: gtsam.NonlinearFactorGraph, + initial: gtsam.Values): + """Optimize using Levenberg-Marquardt optimization.""" + params = gtsam.LevenbergMarquardtParams() + params.setVerbosityLM("SUMMARY") + optimizer = gtsam.LevenbergMarquardtOptimizer(graph, initial, params) + result = optimizer.optimize() + return result + + def plot(self, + values: gtsam.Values, + title: str = "Estimated Trajectory", + fignum: int = POSES_FIG + 1, + show: bool = False): + """ + Plot poses in values. + + Args: + values: The values object with the poses to plot. + title: The title of the plot. + fignum: The matplotlib figure number. + POSES_FIG is a value from the PreintegrationExample + which we simply increment to generate a new figure. + show: Flag indicating whether to display the figure. + """ + i = 0 + while values.exists(X(i)): + pose_i = values.atPose3(X(i)) + plot_pose3(fignum, pose_i, 1) + i += 1 + plt.title(title) + + gtsam.utils.plot.set_axes_equal(fignum) + + i = 0 + while values.exists(B(i)): + print("Bias Value {0}".format(i), values.atConstantBias(B(i))) + i += 1 + + plt.ioff() + + if show: + plt.show() + + def run(self, + T: int = 12, + compute_covariances: bool = False, + verbose: bool = True): + """ + Main runner. + + Args: + T: Total trajectory time. + compute_covariances: Flag indicating whether to compute marginal covariances. + verbose: Flag indicating if printing should be verbose. 
+ """ + graph = gtsam.NonlinearFactorGraph() + + # initialize data structure for pre-integrated IMU measurements + pim = gtsam.PreintegratedCombinedMeasurements(self.params, + self.actualBias) + + num_poses = T # assumes 1 factor per second + initial = gtsam.Values() + + # simulate the loop + i = 0 # state index + initial_state_i = self.scenario.navState(0) + initial.insert(X(i), initial_state_i.pose()) + initial.insert(V(i), initial_state_i.velocity()) + initial.insert(B(i), self.actualBias) + + # add prior on beginning + self.addPrior(0, graph) + + for k, t in enumerate(np.arange(0, T, self.dt)): + # get measurements and add them to PIM + measuredOmega = self.runner.measuredAngularVelocity(t) + measuredAcc = self.runner.measuredSpecificForce(t) + pim.integrateMeasurement(measuredAcc, measuredOmega, self.dt) + + # Plot IMU many times + if k % 10 == 0: + self.plotImu(t, measuredOmega, measuredAcc) + + if (k + 1) % int(1 / self.dt) == 0: + # Plot every second + self.plotGroundTruthPose(t, scale=1) + plt.title("Ground Truth Trajectory") + + # create IMU factor every second + factor = gtsam.CombinedImuFactor(X(i), V(i), X(i + 1), + V(i + 1), B(i), B(i + 1), pim) + graph.push_back(factor) + + if verbose: + print(factor) + print(pim.predict(initial_state_i, self.actualBias)) + + pim.resetIntegration() + + rotationNoise = gtsam.Rot3.Expmap(np.random.randn(3) * 0.1) + translationNoise = gtsam.Point3(*np.random.randn(3) * 1) + poseNoise = gtsam.Pose3(rotationNoise, translationNoise) + + actual_state_i = self.scenario.navState(t + self.dt) + print("Actual state at {0}:\n{1}".format( + t + self.dt, actual_state_i)) + + noisy_state_i = gtsam.NavState( + actual_state_i.pose().compose(poseNoise), + actual_state_i.velocity() + np.random.randn(3) * 0.1) + noisy_bias_i = self.actualBias + gtsam.imuBias.ConstantBias( + np.random.randn(3) * 0.1, + np.random.randn(3) * 0.1) + + initial.insert(X(i + 1), noisy_state_i.pose()) + initial.insert(V(i + 1), noisy_state_i.velocity()) + initial.insert(B(i + 1), noisy_bias_i) + i += 1 + + # add priors on end + self.addPrior(num_poses - 1, graph) + + initial.print("Initial values:") + + result = self.optimize(graph, initial) + + result.print("Optimized values:") + print("------------------") + print("Initial Error =", graph.error(initial)) + print("Final Error =", graph.error(result)) + print("------------------") + + if compute_covariances: + # Calculate and print marginal covariances + marginals = gtsam.Marginals(graph, result) + print("Covariance on bias:\n", + marginals.marginalCovariance(BIAS_KEY)) + for i in range(num_poses): + print("Covariance on pose {}:\n{}\n".format( + i, marginals.marginalCovariance(X(i)))) + print("Covariance on vel {}:\n{}\n".format( + i, marginals.marginalCovariance(V(i)))) + + self.plot(result, show=True) + + +if __name__ == '__main__': + args = parse_args() + + CombinedImuFactorExample(args.twist_scenario).run(args.time, + args.compute_covariances, + args.verbose) From 9891449049a0b359d486e882c21a84d07c8cf5e0 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 15 Oct 2022 09:02:45 -0400 Subject: [PATCH 03/29] rename jacobian variables so it matches the math in ImuFactor.pdf --- gtsam/navigation/CombinedImuFactor.cpp | 28 +++++++++++++------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/gtsam/navigation/CombinedImuFactor.cpp b/gtsam/navigation/CombinedImuFactor.cpp index 8d3a7dd31..5260597fa 100644 --- a/gtsam/navigation/CombinedImuFactor.cpp +++ b/gtsam/navigation/CombinedImuFactor.cpp @@ -110,21 +110,21 @@ 
void PreintegratedCombinedMeasurements::integrateMeasurement( // and preintegrated measurements // Single Jacobians to propagate covariance - Matrix3 theta_H_biasOmega = C.topRows<3>(); - Matrix3 pos_H_biasAcc = B.middleRows<3>(3); - Matrix3 vel_H_biasAcc = B.bottomRows<3>(); + Matrix3 theta_H_omega = C.topRows<3>(); + Matrix3 pos_H_acc = B.middleRows<3>(3); + Matrix3 vel_H_acc = B.bottomRows<3>(); - Matrix3 theta_H_biasOmegaInit = -theta_H_biasOmega; - Matrix3 pos_H_biasAccInit = -pos_H_biasAcc; - Matrix3 vel_H_biasAccInit = -vel_H_biasAcc; + Matrix3 theta_H_biasOmegaInit = -theta_H_omega; + Matrix3 pos_H_biasAccInit = -pos_H_acc; + Matrix3 vel_H_biasAccInit = -vel_H_acc; // overall Jacobian wrt preintegrated measurements (df/dx) Eigen::Matrix F; F.setZero(); F.block<9, 9>(0, 0) = A; - F.block<3, 3>(0, 12) = theta_H_biasOmega; - F.block<3, 3>(3, 9) = pos_H_biasAcc; - F.block<3, 3>(6, 9) = vel_H_biasAcc; + F.block<3, 3>(0, 12) = theta_H_omega; + F.block<3, 3>(3, 9) = pos_H_acc; + F.block<3, 3>(6, 9) = vel_H_acc; F.block<6, 6>(9, 9) = I_6x6; // Update the uncertainty on the state (matrix F in [4]). @@ -149,17 +149,17 @@ void PreintegratedCombinedMeasurements::integrateMeasurement( // BLOCK DIAGONAL TERMS D_R_R(&G_measCov_Gt) = - (theta_H_biasOmega * (wCov / dt) * theta_H_biasOmega.transpose()) // + (theta_H_omega * (wCov / dt) * theta_H_omega.transpose()) // + (theta_H_biasOmegaInit * bInitCov22 * theta_H_biasOmegaInit.transpose()); D_t_t(&G_measCov_Gt) = - (pos_H_biasAcc * (aCov / dt) * pos_H_biasAcc.transpose()) // + (pos_H_acc * (aCov / dt) * pos_H_acc.transpose()) // + (pos_H_biasAccInit * bInitCov11 * pos_H_biasAccInit.transpose()) // + (dt * iCov); D_v_v(&G_measCov_Gt) = - (vel_H_biasAcc * (aCov / dt) * vel_H_biasAcc.transpose()) // + (vel_H_acc * (aCov / dt) * vel_H_acc.transpose()) // + (vel_H_biasAccInit * bInitCov11 * vel_H_biasAccInit.transpose()); D_a_a(&G_measCov_Gt) = dt * p().biasAccCovariance; @@ -173,12 +173,12 @@ void PreintegratedCombinedMeasurements::integrateMeasurement( D_t_R(&G_measCov_Gt) = pos_H_biasAccInit * bInitCov12 * theta_H_biasOmegaInit.transpose(); D_t_v(&G_measCov_Gt) = - (pos_H_biasAcc * (aCov / dt) * vel_H_biasAcc.transpose()) + + (pos_H_acc * (aCov / dt) * vel_H_acc.transpose()) + (pos_H_biasAccInit * bInitCov11 * vel_H_biasAccInit.transpose()); D_v_R(&G_measCov_Gt) = vel_H_biasAccInit * bInitCov12 * theta_H_biasOmegaInit.transpose(); D_v_t(&G_measCov_Gt) = - (vel_H_biasAcc * (aCov / dt) * pos_H_biasAcc.transpose()) + + (vel_H_acc * (aCov / dt) * pos_H_acc.transpose()) + (vel_H_biasAccInit * bInitCov11 * pos_H_biasAccInit.transpose()); preintMeasCov_.noalias() += G_measCov_Gt; From 7f14b6e6ce5180dbea1115eea0144e4f627f1cd4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 15 Oct 2022 09:29:00 -0400 Subject: [PATCH 04/29] update resetIntegration to set the initial covariance of the bias --- gtsam/navigation/CombinedImuFactor.cpp | 11 +++++++ gtsam/navigation/CombinedImuFactor.h | 9 +++++ .../tests/testCombinedImuFactor.cpp | 33 +++++++++++++++++++ 3 files changed, 53 insertions(+) diff --git a/gtsam/navigation/CombinedImuFactor.cpp b/gtsam/navigation/CombinedImuFactor.cpp index 5260597fa..57e86d3b6 100644 --- a/gtsam/navigation/CombinedImuFactor.cpp +++ b/gtsam/navigation/CombinedImuFactor.cpp @@ -72,6 +72,17 @@ bool PreintegratedCombinedMeasurements::equals( //------------------------------------------------------------------------------ void PreintegratedCombinedMeasurements::resetIntegration() { + // Set the initial bias covariance to + // the 
estimated covariance from the last step. + p().biasAccOmegaInt = preintMeasCov_.block<6, 6>(9, 9); + PreintegrationType::resetIntegration(); + preintMeasCov_.setZero(); +} + +//------------------------------------------------------------------------------ +void PreintegratedCombinedMeasurements::resetIntegration( + const gtsam::Matrix6& Q_init) { + p().biasAccOmegaInt = Q_init; PreintegrationType::resetIntegration(); preintMeasCov_.setZero(); } diff --git a/gtsam/navigation/CombinedImuFactor.h b/gtsam/navigation/CombinedImuFactor.h index 05e4b481e..ff247009e 100644 --- a/gtsam/navigation/CombinedImuFactor.h +++ b/gtsam/navigation/CombinedImuFactor.h @@ -126,6 +126,15 @@ public: /// Re-initialize PreintegratedCombinedMeasurements void resetIntegration() override; + /** + * @brief Re-initialize PreintegratedCombinedMeasurements with initial bias + * covariance estimate. + * + * @param Q_init The initial bias covariance estimates as 6x6 matrix. If not + * provided, it uses the last values from the preintMeasCov. + */ + void resetIntegration(const gtsam::Matrix6& Q_init); + /// const reference to params, shadows definition in base class Params& p() const { return *boost::static_pointer_cast(this->p_); } /// @} diff --git a/gtsam/navigation/tests/testCombinedImuFactor.cpp b/gtsam/navigation/tests/testCombinedImuFactor.cpp index aacfff0f0..250a0dc42 100644 --- a/gtsam/navigation/tests/testCombinedImuFactor.cpp +++ b/gtsam/navigation/tests/testCombinedImuFactor.cpp @@ -250,6 +250,7 @@ TEST(CombinedImuFactor, CheckCovariance) { EXPECT(assert_equal(expected, actual.preintMeasCov())); } +/* ************************************************************************* */ // Test that the covariance values for the ImuFactor and the CombinedImuFactor // (top-left 9x9) are the same TEST(CombinedImuFactor, SameCovariance) { @@ -316,6 +317,38 @@ TEST(CombinedImuFactor, Accelerating) { EXPECT(assert_equal(estimatedCov, expected, 0.1)); } +/* ************************************************************************* */ +TEST(CombinedImuFactor, ResetIntegration) { + const double a = 0.2, v = 50; + + // Set up body pointing towards y axis, and start at 10,20,0 with velocity + // going in X The body itself has Z axis pointing down + const Rot3 nRb(Point3(0, 1, 0), Point3(1, 0, 0), Point3(0, 0, -1)); + const Point3 initial_position(10, 20, 0); + const Vector3 initial_velocity(v, 0, 0); + + const AcceleratingScenario scenario(nRb, initial_position, initial_velocity, + Vector3(a, 0, 0)); + + const double T = 3.0; // seconds + + CombinedScenarioRunner runner(scenario, testing::Params(), T / 10); + + PreintegratedCombinedMeasurements pim = runner.integrate(T); + // Make copy for testing different conditions + PreintegratedCombinedMeasurements pim2 = pim; + + // Test default method + pim.resetIntegration(); + Matrix6 expected = Z_6x6; + EXPECT(assert_equal(expected, pim.p().biasAccOmegaInt , 1e-9)); + + // Test method where Q_init is provided + Matrix6 expected_Q_init = I_6x6 * 0.001; + pim2.resetIntegration(expected_Q_init); + EXPECT(assert_equal(expected_Q_init, pim.p().biasAccOmegaInt, 1e-9)); +} + /* ************************************************************************* */ int main() { TestResult tr; From 6eef703019e38af836d3dadbffb6d5627e4ff657 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 15 Nov 2022 16:21:42 -0500 Subject: [PATCH 05/29] reorder operations in resetIntegration and add docstrings --- gtsam/navigation/CombinedImuFactor.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff 
--git a/gtsam/navigation/CombinedImuFactor.cpp b/gtsam/navigation/CombinedImuFactor.cpp index 57e86d3b6..142faedd3 100644 --- a/gtsam/navigation/CombinedImuFactor.cpp +++ b/gtsam/navigation/CombinedImuFactor.cpp @@ -72,18 +72,20 @@ bool PreintegratedCombinedMeasurements::equals( //------------------------------------------------------------------------------ void PreintegratedCombinedMeasurements::resetIntegration() { + // Base class method to reset the preintegrated measurements + PreintegrationType::resetIntegration(); // Set the initial bias covariance to // the estimated covariance from the last step. p().biasAccOmegaInt = preintMeasCov_.block<6, 6>(9, 9); - PreintegrationType::resetIntegration(); preintMeasCov_.setZero(); } //------------------------------------------------------------------------------ void PreintegratedCombinedMeasurements::resetIntegration( const gtsam::Matrix6& Q_init) { - p().biasAccOmegaInt = Q_init; + // Base class method to reset the preintegrated measurements PreintegrationType::resetIntegration(); + p().biasAccOmegaInt = Q_init; preintMeasCov_.setZero(); } From dc83c6f6185ade4bc0b56914c9d276662cbeb5c7 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 15 Nov 2022 19:22:33 -0500 Subject: [PATCH 06/29] update CombinedScenarioRunner to accept preintMeasCov --- gtsam/navigation/ScenarioRunner.cpp | 2 +- gtsam/navigation/ScenarioRunner.h | 8 ++++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/gtsam/navigation/ScenarioRunner.cpp b/gtsam/navigation/ScenarioRunner.cpp index 9d3e258de..856f0e692 100644 --- a/gtsam/navigation/ScenarioRunner.cpp +++ b/gtsam/navigation/ScenarioRunner.cpp @@ -108,7 +108,7 @@ Matrix6 ScenarioRunner::estimateNoiseCovariance(size_t N) const { PreintegratedCombinedMeasurements CombinedScenarioRunner::integrate( double T, const Bias& estimatedBias, bool corrupted) const { gttic_(integrate); - PreintegratedCombinedMeasurements pim(p_, estimatedBias); + PreintegratedCombinedMeasurements pim(p_, estimatedBias, preintMeasCov_); const double dt = imuSampleTime(); const size_t nrSteps = T / dt; diff --git a/gtsam/navigation/ScenarioRunner.h b/gtsam/navigation/ScenarioRunner.h index cee5a54ab..7f1efa063 100644 --- a/gtsam/navigation/ScenarioRunner.h +++ b/gtsam/navigation/ScenarioRunner.h @@ -118,15 +118,19 @@ class GTSAM_EXPORT CombinedScenarioRunner : public ScenarioRunner { private: const SharedParams p_; const Bias estimatedBias_; + const Eigen::Matrix preintMeasCov_; public: CombinedScenarioRunner(const Scenario& scenario, const SharedParams& p, double imuSampleTime = 1.0 / 100.0, - const Bias& bias = Bias()) + const Bias& bias = Bias(), + const Eigen::Matrix& preintMeasCov = + Eigen::Matrix::Zero()) : ScenarioRunner(scenario, static_cast(p), imuSampleTime, bias), p_(p), - estimatedBias_(bias) {} + estimatedBias_(bias), + preintMeasCov_(preintMeasCov) {} /// Integrate measurements for T seconds into a PIM PreintegratedCombinedMeasurements integrate( From 41d0606816524f3e546e641e5887c32345513bd6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 15 Nov 2022 19:23:55 -0500 Subject: [PATCH 07/29] update PreintegratedCombinedMeasurements constructor to take default preintMeasCov --- gtsam/navigation/CombinedImuFactor.h | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/gtsam/navigation/CombinedImuFactor.h b/gtsam/navigation/CombinedImuFactor.h index ff247009e..71169ab2c 100644 --- a/gtsam/navigation/CombinedImuFactor.h +++ b/gtsam/navigation/CombinedImuFactor.h @@ -95,15 +95,16 @@ public: /** 
   * Default constructor, initializes the class with no measurements
-   * @param p Parameters, typically fixed in a single application
+   * @param p Parameters, typically fixed in a single application
   * @param biasHat Current estimate of acceleration and rotation rate biases
+   * @param preintMeasCov Covariance matrix used in noise model.
   */
  PreintegratedCombinedMeasurements(
      const boost::shared_ptr<Params>& p,
-      const imuBias::ConstantBias& biasHat = imuBias::ConstantBias())
-      : PreintegrationType(p, biasHat) {
-    preintMeasCov_.setZero();
-  }
+      const imuBias::ConstantBias& biasHat = imuBias::ConstantBias(),
+      const Eigen::Matrix<double, 15, 15>& preintMeasCov =
+          Eigen::Matrix<double, 15, 15>::Zero())
+      : PreintegrationType(p, biasHat), preintMeasCov_(preintMeasCov) {}

  /**
   * Construct preintegrated directly from members: base class and preintMeasCov

From 3006a764446d6b7680a2aae840f32dc642d26c8a Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Tue, 15 Nov 2022 19:24:22 -0500
Subject: [PATCH 08/29] update CombinedImuFactor test to better test for integration reset

---
 .../tests/testCombinedImuFactor.cpp | 22 +++++++++++++------
 1 file changed, 15 insertions(+), 7 deletions(-)

diff --git a/gtsam/navigation/tests/testCombinedImuFactor.cpp b/gtsam/navigation/tests/testCombinedImuFactor.cpp
index 250a0dc42..fcda1da11 100644
--- a/gtsam/navigation/tests/testCombinedImuFactor.cpp
+++ b/gtsam/navigation/tests/testCombinedImuFactor.cpp
@@ -36,14 +36,17 @@
 namespace testing {
 // Create default parameters with Z-down and above noise parameters
-static boost::shared_ptr<PreintegratedCombinedMeasurements::Params> Params() {
+static boost::shared_ptr<PreintegratedCombinedMeasurements::Params> Params(
+    const Matrix3& biasAccCovariance = Matrix3::Zero(),
+    const Matrix3& biasOmegaCovariance = Matrix3::Zero(),
+    const Matrix6& biasAccOmegaInt = Matrix6::Zero()) {
   auto p = PreintegratedCombinedMeasurements::Params::MakeSharedD(kGravity);
   p->gyroscopeCovariance = kGyroSigma * kGyroSigma * I_3x3;
   p->accelerometerCovariance = kAccelSigma * kAccelSigma * I_3x3;
   p->integrationCovariance = 0.0001 * I_3x3;
-  p->biasAccCovariance = Z_3x3;
-  p->biasOmegaCovariance = Z_3x3;
-  p->biasAccOmegaInt = Z_6x6;
+  p->biasAccCovariance = biasAccCovariance;
+  p->biasOmegaCovariance = biasOmegaCovariance;
+  p->biasAccOmegaInt = biasAccOmegaInt;
   return p;
 }
 }  // namespace testing
@@ -332,7 +335,12 @@ TEST(CombinedImuFactor, ResetIntegration) {
   const double T = 3.0;  // seconds
-  CombinedScenarioRunner runner(scenario, testing::Params(), T / 10);
+  auto preinMeasCov = 0.001 * Eigen::Matrix<double, 15, 15>::Identity();
+  CombinedScenarioRunner runner(
+      scenario,
+      testing::Params(Matrix3::Zero(), Matrix3::Zero(),
+                      0.1 * Matrix6::Identity()),
+      T / 10, imuBias::ConstantBias(), preinMeasCov);
   PreintegratedCombinedMeasurements pim = runner.integrate(T);
   // Make copy for testing different conditions
@@ -340,8 +348,8 @@ TEST(CombinedImuFactor, ResetIntegration) {
   // Test default method
   pim.resetIntegration();
-  Matrix6 expected = Z_6x6;
+  Matrix6 expected = 0.001 * I_6x6;
   EXPECT(assert_equal(expected, pim.p().biasAccOmegaInt, 1e-9));
   // Test method where Q_init is provided
   Matrix6 expected_Q_init = I_6x6 * 0.001;

From 2fb93dbdfb53783b775f73da6f127040776f8a4f Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Fri, 13 Jan 2023 10:15:40 -0500
Subject: [PATCH 09/29] remove biasAccOmegaInit default reset

---
 gtsam/navigation/CombinedImuFactor.cpp | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/gtsam/navigation/CombinedImuFactor.cpp b/gtsam/navigation/CombinedImuFactor.cpp
index 142faedd3..e5ce5a711 100644
---
a/gtsam/navigation/CombinedImuFactor.cpp
+++ b/gtsam/navigation/CombinedImuFactor.cpp
@@ -74,9 +74,6 @@ bool PreintegratedCombinedMeasurements::equals(
 void PreintegratedCombinedMeasurements::resetIntegration() {
   // Base class method to reset the preintegrated measurements
   PreintegrationType::resetIntegration();
-  // Set the initial bias covariance to
-  // the estimated covariance from the last step.
-  p().biasAccOmegaInt = preintMeasCov_.block<6, 6>(9, 9);
   preintMeasCov_.setZero();
 }

From 4f9582d0ca588dfb84cbbdc152bfd1992bdc8f13 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Fri, 13 Jan 2023 11:14:51 -0500
Subject: [PATCH 10/29] update test

---
 gtsam/navigation/tests/testCombinedImuFactor.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/gtsam/navigation/tests/testCombinedImuFactor.cpp b/gtsam/navigation/tests/testCombinedImuFactor.cpp
index fcda1da11..339564a2b 100644
--- a/gtsam/navigation/tests/testCombinedImuFactor.cpp
+++ b/gtsam/navigation/tests/testCombinedImuFactor.cpp
@@ -348,7 +348,7 @@ TEST(CombinedImuFactor, ResetIntegration) {
   // Test default method
   pim.resetIntegration();
-  Matrix6 expected = 0.001 * I_6x6;
+  Matrix6 expected = 0.1 * I_6x6;
   EXPECT(assert_equal(expected, pim.p().biasAccOmegaInt, 1e-9));
   // Test method where Q_init is provided

From c204524a3b2aa86d6e00316add1f0d5c4c3030bd Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Wed, 1 Mar 2023 13:02:55 -0500
Subject: [PATCH 11/29] function to add constant term for correcting HybridGaussianFactorGraph

---
 gtsam/hybrid/GaussianMixtureFactor.h        |   3 +
 gtsam/hybrid/tests/testHybridEstimation.cpp | 195 ++++++++++++++++++--
 2 files changed, 187 insertions(+), 11 deletions(-)

diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h
index 2088a7837..8e1a95e9d 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.h
+++ b/gtsam/hybrid/GaussianMixtureFactor.h
@@ -143,6 +143,9 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
    */
   double error(const HybridValues &values) const override;
+  /// Getter for GaussianFactor decision tree
+  Factors factors() const { return factors_; }
+
   /// Add MixtureFactor to a Sum, syntactic sugar.
friend GaussianFactorGraphTree &operator+=( GaussianFactorGraphTree &sum, const GaussianMixtureFactor &factor) { diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 28f47232b..8ec41d51f 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -17,6 +17,7 @@ #include #include +#include #include #include #include @@ -37,12 +38,11 @@ #include "Switching.h" -#include - using namespace std; using namespace gtsam; using symbol_shorthand::X; +using symbol_shorthand::Z; Ordering getOrdering(HybridGaussianFactorGraph& factors, const HybridGaussianFactorGraph& newFactors) { @@ -73,7 +73,7 @@ Ordering getOrdering(HybridGaussianFactorGraph& factors, return ordering; } -TEST(HybridEstimation, Full) { +TEST_DISABLED(HybridEstimation, Full) { size_t K = 6; std::vector measurements = {0, 1, 2, 2, 2, 3}; // Ground truth discrete seq @@ -91,8 +91,7 @@ TEST(HybridEstimation, Full) { hybridOrdering.push_back(M(k)); } - HybridBayesNet::shared_ptr bayesNet = - graph.eliminateSequential(); + HybridBayesNet::shared_ptr bayesNet = graph.eliminateSequential(); EXPECT_LONGS_EQUAL(2 * K - 1, bayesNet->size()); @@ -116,7 +115,7 @@ TEST(HybridEstimation, Full) { /****************************************************************************/ // Test approximate inference with an additional pruning step. -TEST(HybridEstimation, Incremental) { +TEST_DISABLED(HybridEstimation, Incremental) { size_t K = 15; std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, 7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; @@ -284,7 +283,7 @@ AlgebraicDecisionTree getProbPrimeTree( * Test for correctness of different branches of the P'(Continuous | Discrete). * The values should match those of P'(Continuous) for each discrete mode. ********************************************************************************/ -TEST(HybridEstimation, Probability) { +TEST_DISABLED(HybridEstimation, Probability) { constexpr size_t K = 4; std::vector measurements = {0, 1, 2, 2}; double between_sigma = 1.0, measurement_sigma = 0.1; @@ -327,7 +326,7 @@ TEST(HybridEstimation, Probability) { * in the multi-frontal setting. The values should match those of P'(Continuous) * for each discrete mode. */ -TEST(HybridEstimation, ProbabilityMultifrontal) { +TEST_DISABLED(HybridEstimation, ProbabilityMultifrontal) { constexpr size_t K = 4; std::vector measurements = {0, 1, 2, 2}; @@ -338,7 +337,6 @@ TEST(HybridEstimation, ProbabilityMultifrontal) { Switching switching(K, between_sigma, measurement_sigma, measurements, "1/1 1/1"); auto graph = switching.linearizedFactorGraph; - Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph()); // Get the tree of unnormalized probabilities for each mode sequence. AlgebraicDecisionTree expected_probPrimeTree = getProbPrimeTree(graph); @@ -435,7 +433,7 @@ static HybridGaussianFactorGraph::shared_ptr createHybridGaussianFactorGraph() { /********************************************************************************* * Do hybrid elimination and do regression test on discrete conditional. ********************************************************************************/ -TEST(HybridEstimation, eliminateSequentialRegression) { +TEST_DISABLED(HybridEstimation, eliminateSequentialRegression) { // Create the factor graph from the nonlinear factor graph. HybridGaussianFactorGraph::shared_ptr fg = createHybridGaussianFactorGraph(); @@ -470,7 +468,7 @@ TEST(HybridEstimation, eliminateSequentialRegression) { * 3. 
Sample from the Bayes Net. * 4. Check that the ratio `BN(x)/FG(x) = constant` for all samples `x`. ********************************************************************************/ -TEST(HybridEstimation, CorrectnessViaSampling) { +TEST_DISABLED(HybridEstimation, CorrectnessViaSampling) { // 1. Create the factor graph from the nonlinear factor graph. const auto fg = createHybridGaussianFactorGraph(); @@ -503,6 +501,181 @@ TEST(HybridEstimation, CorrectnessViaSampling) { } } +/****************************************************************************/ +std::shared_ptr addConstantTerm( + const HybridGaussianFactorGraph& gfg, const Key& mode, double noise_tight, + double noise_loose, size_t d, size_t tight_index) { + HybridGaussianFactorGraph updated_gfg; + + // logConstant will be of the tighter model + double logConstant = + -0.5 * d * 1.8378770664093454835606594728112 + log(1.0 / noise_tight); + + for (auto&& f : gfg) { + if (auto gmf = dynamic_pointer_cast(f)) { + auto func = [&](const Assignment& assignment, + const GaussianFactor::shared_ptr& gf) { + if (assignment.at(mode) != tight_index) { + double factor_log_constant = + -0.5 * d * 1.8378770664093454835606594728112 + + log(1.0 / noise_loose); + + GaussianFactorGraph gfg_; + gfg_.push_back(gf); + Vector c(d); + c << std::sqrt(2.0 * (logConstant - factor_log_constant)); + auto constantFactor = std::make_shared(c); + gfg_.push_back(constantFactor); + return std::make_shared(gfg_); + } else { + return dynamic_pointer_cast(gf); + } + }; + auto updated_factors = gmf->factors().apply(func); + auto updated_gmf = std::make_shared( + gmf->continuousKeys(), gmf->discreteKeys(), updated_factors); + updated_gfg.add(updated_gmf); + } else { + updated_gfg.add(f); + } + } + return std::make_shared(updated_gfg); +} + +TEST(HybridEstimation, ModeSelection) { + HybridNonlinearFactorGraph graph; + Values initial; + + auto measurement_model = noiseModel::Isotropic::Sigma(1, 0.1); + auto motion_model = noiseModel::Isotropic::Sigma(1, 1.0); + + graph.emplace_shared>(X(0), 0.0, measurement_model); + graph.emplace_shared>(X(1), 0.0, measurement_model); + + DiscreteKeys modes; + modes.emplace_back(M(0), 2); + + // The size of the noise model + size_t d = 1; + double noise_tight = 0.5, noise_loose = 5.0; + + auto model0 = std::make_shared( + X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_loose)), + model1 = std::make_shared( + X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight)); + + std::vector components = {model0, model1}; + + KeyVector keys = {X(0), X(1)}; + graph.emplace_shared(keys, modes, components); + + initial.insert(X(0), 0.0); + initial.insert(X(1), 0.0); + + auto gfg = graph.linearize(initial); + + gfg = addConstantTerm(*gfg, M(0), noise_tight, noise_loose, d, 1); + + HybridBayesNet::shared_ptr bayesNet = gfg->eliminateSequential(); + + HybridValues delta = bayesNet->optimize(); + EXPECT_LONGS_EQUAL(1, delta.discrete().at(M(0))); + + /**************************************************************/ + HybridBayesNet bn; + const DiscreteKey mode{M(0), 2}; + + bn.push_back( + GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1)); + bn.push_back( + GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1)); + bn.emplace_back(new GaussianMixture( + {Z(0)}, {X(0), X(1)}, {mode}, + {GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1), + Z_1x1, noise_loose), + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1), + Z_1x1, noise_tight)})); + + VectorValues vv; + 
vv.insert(Z(0), Z_1x1); + + auto fg = bn.toFactorGraph(vv); + auto expected_posterior = fg.eliminateSequential(); + + // graph.print(); + // gfg->print("Original Gaussian Factor Graph:"); + // fg.print("\n\nFrom Bayes Net"); + + // bayesNet->print("\n\nBayes Net from FG"); + // expected_posterior->print("\n\n"); + EXPECT(assert_equal(*expected_posterior, *bayesNet, 1e-6)); +} + +/****************************************************************************/ +TEST(HybridEstimation, ModeSelection2) { + using symbol_shorthand::Z; + + // The size of the noise model + size_t d = 1; + double noise_tight = 0.5, noise_loose = 5.0; + + HybridBayesNet bn; + const DiscreteKey mode{M(0), 2}; + + bn.push_back( + GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1)); + bn.push_back( + GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1)); + bn.emplace_back(new GaussianMixture( + {Z(0)}, {X(0), X(1)}, {mode}, + {GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1), + Z_1x1, noise_loose), + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1), + Z_1x1, noise_tight)})); + + VectorValues vv; + vv.insert(Z(0), Z_1x1); + + auto fg = bn.toFactorGraph(vv); + + auto expected_posterior = fg.eliminateSequential(); + // expected_posterior->print("\n\n\nLikelihood BN:"); + + std::cout << "\n\n==================\n\n" << std::endl; + HybridNonlinearFactorGraph graph; + Values initial; + + auto measurement_model = noiseModel::Isotropic::Sigma(1, 0.1); + auto motion_model = noiseModel::Isotropic::Sigma(1, 1.0); + + graph.emplace_shared>(X(0), 0.0, measurement_model); + graph.emplace_shared>(X(1), 0.0, measurement_model); + + DiscreteKeys modes; + modes.emplace_back(M(0), 2); + + auto model0 = std::make_shared( + X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_loose)), + model1 = std::make_shared( + X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight)); + + std::vector components = {model0, model1}; + + KeyVector keys = {X(0), X(1)}; + graph.emplace_shared(keys, modes, components); + + initial.insert(X(0), 0.0); + initial.insert(X(1), 0.0); + + auto gfg = graph.linearize(initial); + gfg = addConstantTerm(*gfg, M(0), noise_tight, noise_loose, d, 1); + + HybridBayesNet::shared_ptr bayesNet = gfg->eliminateSequential(); + + // bayesNet->print(); + EXPECT(assert_equal(*expected_posterior, *bayesNet, 1e-6)); +} + /* ************************************************************************* */ int main() { TestResult tr; From 8ba5da44a64fdddac1e1f3c3499c31d9f15916f1 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 1 Mar 2023 14:44:40 -0500 Subject: [PATCH 12/29] some cleanup --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 9 ++++----- gtsam/linear/HessianFactor.h | 2 +- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index aa29c924e..897d56272 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -190,8 +190,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors, /* ************************************************************************ */ // If any GaussianFactorGraph in the decision tree contains a nullptr, convert // that leaf to an empty GaussianFactorGraph. Needed since the DecisionTree will -// otherwise create a GFG with a single (null) factor. -// TODO(dellaert): still a mystery to me why this is needed. 
+// otherwise create a GFG with a single (null) factor, which doesn't register as null. GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) { auto emptyGaussian = [](const GaussianFactorGraph &graph) { bool hasNull = @@ -275,9 +274,9 @@ hybridElimination(const HybridGaussianFactorGraph &factors, }; DecisionTree probabilities(eliminationResults, probability); - return {std::make_shared(gaussianMixture), - std::make_shared(discreteSeparator, - probabilities)}; + return { + std::make_shared(gaussianMixture), + std::make_shared(discreteSeparator, probabilities)}; } else { // Otherwise, we create a resulting GaussianMixtureFactor on the separator, // taking care to correct for conditional constant. diff --git a/gtsam/linear/HessianFactor.h b/gtsam/linear/HessianFactor.h index eb1fe2de2..8cbabcd5d 100644 --- a/gtsam/linear/HessianFactor.h +++ b/gtsam/linear/HessianFactor.h @@ -131,7 +131,7 @@ namespace gtsam { * term, and f the constant term. * JacobianFactor error is \f[ 0.5* (Ax-b)' M (Ax-b) = 0.5*x'A'MAx - x'A'Mb + 0.5*b'Mb \f] * HessianFactor error is \f[ 0.5*(x'Gx - 2x'g + f) = 0.5*x'Gx - x'*g + 0.5*f \f] - * So, with \f$ A = [A1 A2] \f$ and \f$ G=A*'M*A = [A1';A2']*M*[A1 A2] \f$ we have + * So, with \f$ A = [A1 A2] \f$ and \f$ G=A'*M*A = [A1';A2']*M*[A1 A2] \f$ we have \code n1*n1 G11 = A1'*M*A1 n1*n2 G12 = A1'*M*A2 From 01a959b9e56bc94fd647aa5afafe22b7d967c629 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 1 Mar 2023 14:46:47 -0500 Subject: [PATCH 13/29] differing noise models for multi-dimensional problem --- gtsam/hybrid/tests/testHybridEstimation.cpp | 69 ++++++++++++--------- 1 file changed, 39 insertions(+), 30 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 8ec41d51f..c917ffff9 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -73,7 +73,7 @@ Ordering getOrdering(HybridGaussianFactorGraph& factors, return ordering; } -TEST_DISABLED(HybridEstimation, Full) { +TEST(HybridEstimation, Full) { size_t K = 6; std::vector measurements = {0, 1, 2, 2, 2, 3}; // Ground truth discrete seq @@ -115,7 +115,7 @@ TEST_DISABLED(HybridEstimation, Full) { /****************************************************************************/ // Test approximate inference with an additional pruning step. -TEST_DISABLED(HybridEstimation, Incremental) { +TEST(HybridEstimation, Incremental) { size_t K = 15; std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, 7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; @@ -283,7 +283,7 @@ AlgebraicDecisionTree getProbPrimeTree( * Test for correctness of different branches of the P'(Continuous | Discrete). * The values should match those of P'(Continuous) for each discrete mode. ********************************************************************************/ -TEST_DISABLED(HybridEstimation, Probability) { +TEST(HybridEstimation, Probability) { constexpr size_t K = 4; std::vector measurements = {0, 1, 2, 2}; double between_sigma = 1.0, measurement_sigma = 0.1; @@ -326,7 +326,7 @@ TEST_DISABLED(HybridEstimation, Probability) { * in the multi-frontal setting. The values should match those of P'(Continuous) * for each discrete mode. 
*/ -TEST_DISABLED(HybridEstimation, ProbabilityMultifrontal) { +TEST(HybridEstimation, ProbabilityMultifrontal) { constexpr size_t K = 4; std::vector measurements = {0, 1, 2, 2}; @@ -433,7 +433,7 @@ static HybridGaussianFactorGraph::shared_ptr createHybridGaussianFactorGraph() { /********************************************************************************* * Do hybrid elimination and do regression test on discrete conditional. ********************************************************************************/ -TEST_DISABLED(HybridEstimation, eliminateSequentialRegression) { +TEST(HybridEstimation, eliminateSequentialRegression) { // Create the factor graph from the nonlinear factor graph. HybridGaussianFactorGraph::shared_ptr fg = createHybridGaussianFactorGraph(); @@ -468,7 +468,7 @@ TEST_DISABLED(HybridEstimation, eliminateSequentialRegression) { * 3. Sample from the Bayes Net. * 4. Check that the ratio `BN(x)/FG(x) = constant` for all samples `x`. ********************************************************************************/ -TEST_DISABLED(HybridEstimation, CorrectnessViaSampling) { +TEST(HybridEstimation, CorrectnessViaSampling) { // 1. Create the factor graph from the nonlinear factor graph. const auto fg = createHybridGaussianFactorGraph(); @@ -502,14 +502,19 @@ TEST_DISABLED(HybridEstimation, CorrectnessViaSampling) { } /****************************************************************************/ +/** + * Helper function to add the constant term corresponding to + * the difference in noise models. + */ std::shared_ptr addConstantTerm( const HybridGaussianFactorGraph& gfg, const Key& mode, double noise_tight, double noise_loose, size_t d, size_t tight_index) { HybridGaussianFactorGraph updated_gfg; + constexpr double log2pi = 1.8378770664093454835606594728112; // logConstant will be of the tighter model - double logConstant = - -0.5 * d * 1.8378770664093454835606594728112 + log(1.0 / noise_tight); + double logNormalizationConstant = log(1.0 / noise_tight); + double logConstant = -0.5 * d * log2pi + logNormalizationConstant; for (auto&& f : gfg) { if (auto gmf = dynamic_pointer_cast(f)) { @@ -517,13 +522,15 @@ std::shared_ptr addConstantTerm( const GaussianFactor::shared_ptr& gf) { if (assignment.at(mode) != tight_index) { double factor_log_constant = - -0.5 * d * 1.8378770664093454835606594728112 + - log(1.0 / noise_loose); + -0.5 * d * log2pi + log(1.0 / noise_loose); GaussianFactorGraph gfg_; gfg_.push_back(gf); Vector c(d); - c << std::sqrt(2.0 * (logConstant - factor_log_constant)); + for (size_t i = 0; i < d; i++) { + c(i) = std::sqrt(2.0 * (logConstant - factor_log_constant)); + } + auto constantFactor = std::make_shared(c); gfg_.push_back(constantFactor); return std::make_shared(gfg_); @@ -542,6 +549,7 @@ std::shared_ptr addConstantTerm( return std::make_shared(updated_gfg); } +/****************************************************************************/ TEST(HybridEstimation, ModeSelection) { HybridNonlinearFactorGraph graph; Values initial; @@ -616,56 +624,57 @@ TEST(HybridEstimation, ModeSelection2) { using symbol_shorthand::Z; // The size of the noise model - size_t d = 1; + size_t d = 3; double noise_tight = 0.5, noise_loose = 5.0; HybridBayesNet bn; const DiscreteKey mode{M(0), 2}; bn.push_back( - GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1)); + GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1)); bn.push_back( - GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1)); + 
GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1)); bn.emplace_back(new GaussianMixture( {Z(0)}, {X(0), X(1)}, {mode}, - {GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1), - Z_1x1, noise_loose), - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1), - Z_1x1, noise_tight)})); + {GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1), + Z_3x1, noise_loose), + GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1), + Z_3x1, noise_tight)})); VectorValues vv; - vv.insert(Z(0), Z_1x1); + vv.insert(Z(0), Z_3x1); auto fg = bn.toFactorGraph(vv); auto expected_posterior = fg.eliminateSequential(); // expected_posterior->print("\n\n\nLikelihood BN:"); - std::cout << "\n\n==================\n\n" << std::endl; + // std::cout << "\n\n==================\n\n" << std::endl; + HybridNonlinearFactorGraph graph; Values initial; - auto measurement_model = noiseModel::Isotropic::Sigma(1, 0.1); - auto motion_model = noiseModel::Isotropic::Sigma(1, 1.0); + auto measurement_model = noiseModel::Isotropic::Sigma(d, 0.1); + auto motion_model = noiseModel::Isotropic::Sigma(d, 1.0); - graph.emplace_shared>(X(0), 0.0, measurement_model); - graph.emplace_shared>(X(1), 0.0, measurement_model); + graph.emplace_shared>(X(0), Z_3x1, measurement_model); + graph.emplace_shared>(X(1), Z_3x1, measurement_model); DiscreteKeys modes; modes.emplace_back(M(0), 2); - auto model0 = std::make_shared( - X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_loose)), - model1 = std::make_shared( - X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight)); + auto model0 = std::make_shared>( + X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_loose)), + model1 = std::make_shared>( + X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight)); std::vector components = {model0, model1}; KeyVector keys = {X(0), X(1)}; graph.emplace_shared(keys, modes, components); - initial.insert(X(0), 0.0); - initial.insert(X(1), 0.0); + initial.insert(X(0), Z_3x1); + initial.insert(X(1), Z_3x1); auto gfg = graph.linearize(initial); gfg = addConstantTerm(*gfg, M(0), noise_tight, noise_loose, d, 1); From 2e1c51478cd83aa7f2c176f71a9b68adc88a5307 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 1 Mar 2023 15:40:11 -0500 Subject: [PATCH 14/29] fix docstring --- gtsam/navigation/CombinedImuFactor.h | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/gtsam/navigation/CombinedImuFactor.h b/gtsam/navigation/CombinedImuFactor.h index 71169ab2c..42e3db34e 100644 --- a/gtsam/navigation/CombinedImuFactor.h +++ b/gtsam/navigation/CombinedImuFactor.h @@ -131,8 +131,7 @@ public: * @brief Re-initialize PreintegratedCombinedMeasurements with initial bias * covariance estimate. * - * @param Q_init The initial bias covariance estimates as 6x6 matrix. If not - * provided, it uses the last values from the preintMeasCov. + * @param Q_init The initial bias covariance estimates as a 6x6 matrix. 
*/ void resetIntegration(const gtsam::Matrix6& Q_init); From e4b1505e38c887fb6c63c30b4a800a2eacc33511 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 1 Mar 2023 15:40:31 -0500 Subject: [PATCH 15/29] fix ImuFactor examples to show correct prediction values --- python/gtsam/examples/CombinedImuFactorExample.py | 13 +++++++++---- python/gtsam/examples/ImuFactorExample.py | 11 ++++++++--- 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/python/gtsam/examples/CombinedImuFactorExample.py b/python/gtsam/examples/CombinedImuFactorExample.py index 82714dd7c..e3f8c6b34 100644 --- a/python/gtsam/examples/CombinedImuFactorExample.py +++ b/python/gtsam/examples/CombinedImuFactorExample.py @@ -5,7 +5,7 @@ All Rights Reserved See LICENSE for the license information -A script validating and demonstrating the CobiendImuFactor inference. +A script validating and demonstrating inference with the CombinedImuFactor. Author: Varun Agrawal """ @@ -17,15 +17,15 @@ from __future__ import print_function import argparse import math -import gtsam import matplotlib.pyplot as plt import numpy as np from gtsam.symbol_shorthand import B, V, X from gtsam.utils.plot import plot_pose3 from mpl_toolkits.mplot3d import Axes3D - from PreintegrationExample import POSES_FIG, PreintegrationExample +import gtsam + GRAVITY = 9.81 np.set_printoptions(precision=3, suppress=True) @@ -192,7 +192,9 @@ class CombinedImuFactorExample(PreintegrationExample): if verbose: print(factor) - print(pim.predict(initial_state_i, self.actualBias)) + print("Predicted state at {0}:\n{1}".format( + t + self.dt, + pim.predict(initial_state_i, self.actualBias))) pim.resetIntegration() @@ -204,6 +206,9 @@ class CombinedImuFactorExample(PreintegrationExample): print("Actual state at {0}:\n{1}".format( t + self.dt, actual_state_i)) + # Set initial state to current + initial_state_i = actual_state_i + noisy_state_i = gtsam.NavState( actual_state_i.pose().compose(poseNoise), actual_state_i.velocity() + np.random.randn(3) * 0.1) diff --git a/python/gtsam/examples/ImuFactorExample.py b/python/gtsam/examples/ImuFactorExample.py index c86a4e216..a0c833274 100644 --- a/python/gtsam/examples/ImuFactorExample.py +++ b/python/gtsam/examples/ImuFactorExample.py @@ -17,15 +17,15 @@ from __future__ import print_function import argparse import math -import gtsam import matplotlib.pyplot as plt import numpy as np from gtsam.symbol_shorthand import B, V, X from gtsam.utils.plot import plot_pose3 from mpl_toolkits.mplot3d import Axes3D - from PreintegrationExample import POSES_FIG, PreintegrationExample +import gtsam + BIAS_KEY = B(0) GRAVITY = 9.81 @@ -185,7 +185,9 @@ class ImuFactorExample(PreintegrationExample): if verbose: print(factor) - print(pim.predict(initial_state_i, self.actualBias)) + print("Predicted state at {0}:\n{1}".format( + t + self.dt, + pim.predict(initial_state_i, self.actualBias))) pim.resetIntegration() @@ -197,6 +199,9 @@ class ImuFactorExample(PreintegrationExample): print("Actual state at {0}:\n{1}".format( t + self.dt, actual_state_i)) + # Set initial state to current + initial_state_i = actual_state_i + noisy_state_i = gtsam.NavState( actual_state_i.pose().compose(poseNoise), actual_state_i.velocity() + np.random.randn(3) * 0.1) From cc9a2dc404e57ec51f919b12c7375d8c85dd73e9 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 1 Mar 2023 15:51:11 -0500 Subject: [PATCH 16/29] convert from boost to std shared pointers --- gtsam/navigation/PreintegrationCombinedParams.h | 10 ++++++---- 1 file changed, 6 insertions(+), 4 
deletions(-) diff --git a/gtsam/navigation/PreintegrationCombinedParams.h b/gtsam/navigation/PreintegrationCombinedParams.h index 2c99463bc..30441ec36 100644 --- a/gtsam/navigation/PreintegrationCombinedParams.h +++ b/gtsam/navigation/PreintegrationCombinedParams.h @@ -59,17 +59,17 @@ struct GTSAM_EXPORT PreintegrationCombinedParams : PreintegrationParams { // Default Params for a Z-down navigation frame, such as NED: gravity points // along positive Z-axis - static boost::shared_ptr MakeSharedD( + static std::shared_ptr MakeSharedD( double g = 9.81) { - return boost::shared_ptr( + return std::shared_ptr( new PreintegrationCombinedParams(Vector3(0, 0, g))); } // Default Params for a Z-up navigation frame, such as ENU: gravity points // along negative Z-axis - static boost::shared_ptr MakeSharedU( + static std::shared_ptr MakeSharedU( double g = 9.81) { - return boost::shared_ptr( + return std::shared_ptr( new PreintegrationCombinedParams(Vector3(0, 0, -g))); } @@ -86,6 +86,7 @@ struct GTSAM_EXPORT PreintegrationCombinedParams : PreintegrationParams { const Matrix6& getBiasAccOmegaInit() const { return biasAccOmegaInt; } private: +#ifdef GTSAM_ENABLE_BOOST_SERIALIZATION /** Serialization function */ friend class boost::serialization::access; template @@ -96,6 +97,7 @@ struct GTSAM_EXPORT PreintegrationCombinedParams : PreintegrationParams { ar& BOOST_SERIALIZATION_NVP(biasOmegaCovariance); ar& BOOST_SERIALIZATION_NVP(biasAccOmegaInt); } +#endif public: GTSAM_MAKE_ALIGNED_OPERATOR_NEW From 05f741e08361b55a212adf197cb211275e58ab32 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 1 Mar 2023 16:25:44 -0500 Subject: [PATCH 17/29] include bitset header --- gtsam/hybrid/tests/testHybridEstimation.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index c917ffff9..f41bee103 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -36,6 +36,8 @@ // Include for test suite #include +#include + #include "Switching.h" using namespace std; From 9d470e87a570f055653e70c44169d62ddb15578b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 1 Mar 2023 16:44:13 -0500 Subject: [PATCH 18/29] install boost for Ubuntu with apt --- .github/scripts/boost.sh | 18 ------------------ .github/workflows/build-linux.yml | 3 +-- .github/workflows/build-special.yml | 3 +-- 3 files changed, 2 insertions(+), 22 deletions(-) delete mode 100644 .github/scripts/boost.sh diff --git a/.github/scripts/boost.sh b/.github/scripts/boost.sh deleted file mode 100644 index 3c7e01274..000000000 --- a/.github/scripts/boost.sh +++ /dev/null @@ -1,18 +0,0 @@ -### Script to install Boost -BOOST_FOLDER=boost_${BOOST_VERSION//./_} - -# Download Boost -wget https://boostorg.jfrog.io/artifactory/main/release/${BOOST_VERSION}/source/${BOOST_FOLDER}.tar.gz - -# Unzip -tar -zxf ${BOOST_FOLDER}.tar.gz - -# Bootstrap -cd ${BOOST_FOLDER}/ -./bootstrap.sh --with-libraries=serialization,filesystem,thread,system,atomic,date_time,timer,chrono,program_options,regex - -# Build and install -sudo ./b2 -j$(nproc) install - -# Rebuild ld cache -sudo ldconfig diff --git a/.github/workflows/build-linux.yml b/.github/workflows/build-linux.yml index b678a71db..c801bb275 100644 --- a/.github/workflows/build-linux.yml +++ b/.github/workflows/build-linux.yml @@ -12,7 +12,6 @@ jobs: CTEST_PARALLEL_LEVEL: 2 CMAKE_BUILD_TYPE: ${{ matrix.build_type }} GTSAM_BUILD_UNSTABLE: ${{ matrix.build_unstable }} - 
BOOST_VERSION: 1.67.0 strategy: fail-fast: true @@ -82,7 +81,7 @@ jobs: - name: Install Boost run: | - bash .github/scripts/boost.sh + sudo apt-get -y install libboost-all-dev - name: Build and Test run: bash .github/scripts/unix.sh -t diff --git a/.github/workflows/build-special.yml b/.github/workflows/build-special.yml index 7582bf41c..02d19668c 100644 --- a/.github/workflows/build-special.yml +++ b/.github/workflows/build-special.yml @@ -12,7 +12,6 @@ jobs: CTEST_PARALLEL_LEVEL: 2 CMAKE_BUILD_TYPE: ${{ matrix.build_type }} GTSAM_BUILD_UNSTABLE: ON - BOOST_VERSION: 1.67.0 strategy: fail-fast: false @@ -100,7 +99,7 @@ jobs: - name: Install Boost if: runner.os == 'Linux' run: | - bash .github/scripts/boost.sh + sudo apt-get -y install libboost-all-dev - name: Install (macOS) if: runner.os == 'macOS' From d66b8b4fee56a70b94450afb6a4ac5e22b90f71a Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 1 Mar 2023 17:18:04 -0500 Subject: [PATCH 19/29] remove old boost specific flags --- .github/scripts/unix.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/scripts/unix.sh b/.github/scripts/unix.sh index 1676ad537..e497d4764 100644 --- a/.github/scripts/unix.sh +++ b/.github/scripts/unix.sh @@ -72,8 +72,6 @@ function configure() -DGTSAM_USE_SYSTEM_METIS=${GTSAM_USE_SYSTEM_METIS:-OFF} \ -DGTSAM_BUILD_WITH_MARCH_NATIVE=OFF \ -DGTSAM_SINGLE_TEST_EXE=OFF \ - -DBOOST_ROOT=$BOOST_ROOT \ - -DBoost_NO_SYSTEM_PATHS=ON \ -DBoost_ARCHITECTURE=-x64 } From 32e266d5ab1cc56481743f5412cc6f84261453b5 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 1 Mar 2023 18:01:37 -0500 Subject: [PATCH 20/29] remove redundant cmake flag in unix.sh and set max processes for linux to 4 --- .github/scripts/unix.sh | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/scripts/unix.sh b/.github/scripts/unix.sh index e497d4764..67881921c 100644 --- a/.github/scripts/unix.sh +++ b/.github/scripts/unix.sh @@ -71,8 +71,7 @@ function configure() -DGTSAM_USE_SYSTEM_EIGEN=${GTSAM_USE_SYSTEM_EIGEN:-OFF} \ -DGTSAM_USE_SYSTEM_METIS=${GTSAM_USE_SYSTEM_METIS:-OFF} \ -DGTSAM_BUILD_WITH_MARCH_NATIVE=OFF \ - -DGTSAM_SINGLE_TEST_EXE=OFF \ - -DBoost_ARCHITECTURE=-x64 + -DGTSAM_SINGLE_TEST_EXE=OFF } @@ -95,7 +94,7 @@ function build () if [ "$(uname)" == "Linux" ]; then if (($(nproc) > 2)); then - make -j$(nproc) + make -j4 else make -j2 fi From e0ac295ebfa79da5c577f7cbeca1b439d64e04f2 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 1 Mar 2023 18:01:44 -0500 Subject: [PATCH 21/29] format Python CI yaml file --- .github/workflows/build-python.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/build-python.yml b/.github/workflows/build-python.yml index 442e26e47..f09d589dc 100644 --- a/.github/workflows/build-python.yml +++ b/.github/workflows/build-python.yml @@ -53,6 +53,7 @@ jobs: steps: - name: Checkout uses: actions/checkout@v3 + - name: Install (Linux) if: runner.os == 'Linux' run: | @@ -79,6 +80,7 @@ jobs: echo "CC=clang-${{ matrix.version }}" >> $GITHUB_ENV echo "CXX=clang++-${{ matrix.version }}" >> $GITHUB_ENV fi + - name: Install (macOS) if: runner.os == 'macOS' run: | @@ -88,22 +90,27 @@ jobs: sudo xcode-select -switch /Applications/Xcode.app echo "CC=clang" >> $GITHUB_ENV echo "CXX=clang++" >> $GITHUB_ENV + - name: Set GTSAM_WITH_TBB Flag if: matrix.flag == 'tbb' run: | echo "GTSAM_WITH_TBB=ON" >> $GITHUB_ENV echo "GTSAM Uses TBB" + - name: Set Swap Space if: runner.os == 'Linux' uses: pierotofy/set-swap-space@master with: swap-size-gb: 6 + - name: Install 
Dependencies run: | bash .github/scripts/python.sh -d + - name: Build run: | bash .github/scripts/python.sh -b + - name: Test run: | bash .github/scripts/python.sh -t From 62cf5bff97286f4137b5996dcf236dbaaf748bfa Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 3 Mar 2023 12:46:59 -0500 Subject: [PATCH 22/29] don't print warnings to CI --- .github/scripts/unix.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/scripts/unix.sh b/.github/scripts/unix.sh index 67881921c..af9ac8991 100644 --- a/.github/scripts/unix.sh +++ b/.github/scripts/unix.sh @@ -58,8 +58,10 @@ function configure() fi # GTSAM_BUILD_WITH_MARCH_NATIVE=OFF: to avoid crashes in builder VMs + # CMAKE_CXX_FLAGS="-w": Suppress warnings to avoid IO latency in CI logs cmake $SOURCE_DIR \ -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE:-Debug} \ + -DCMAKE_CXX_FLAGS="-w" \ -DGTSAM_BUILD_TESTS=${GTSAM_BUILD_TESTS:-OFF} \ -DGTSAM_BUILD_UNSTABLE=${GTSAM_BUILD_UNSTABLE:-ON} \ -DGTSAM_WITH_TBB=${GTSAM_WITH_TBB:-OFF} \ From c42ea14ce1273f3749e573bd3922728557e3e468 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 3 Mar 2023 12:48:04 -0500 Subject: [PATCH 23/29] increase swap space for special builds --- .github/workflows/build-special.yml | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-special.yml b/.github/workflows/build-special.yml index 02d19668c..fda1c2b91 100644 --- a/.github/workflows/build-special.yml +++ b/.github/workflows/build-special.yml @@ -92,9 +92,15 @@ jobs: sudo apt-get -y install cmake build-essential pkg-config libpython3-dev python3-numpy libicu-dev - sudo apt-get install -y clang-${{ matrix.version }} g++-multilib - echo "CC=clang-${{ matrix.version }}" >> $GITHUB_ENV - echo "CXX=clang++-${{ matrix.version }}" >> $GITHUB_ENV + if [ "${{ matrix.compiler }}" = "gcc" ]; then + sudo apt-get install -y g++-${{ matrix.version }} g++-${{ matrix.version }}-multilib + echo "CC=gcc-${{ matrix.version }}" >> $GITHUB_ENV + echo "CXX=g++-${{ matrix.version }}" >> $GITHUB_ENV + else + sudo apt-get install -y clang-${{ matrix.version }} g++-multilib + echo "CC=clang-${{ matrix.version }}" >> $GITHUB_ENV + echo "CXX=clang++-${{ matrix.version }}" >> $GITHUB_ENV + fi - name: Install Boost if: runner.os == 'Linux' @@ -150,6 +156,12 @@ jobs: echo "GTSAM_USE_BOOST_FEATURES=OFF" >> $GITHUB_ENV echo "GTSAM will not use BOOST" + - name: Set Swap Space + if: runner.os == 'Linux' + uses: pierotofy/set-swap-space@master + with: + swap-size-gb: 12 + - name: Build & Test run: | bash .github/scripts/unix.sh -t From 0e6be7393b090e841811d27eb43adc2f70acf11e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 3 Mar 2023 12:48:28 -0500 Subject: [PATCH 24/29] remove redundant push action from build-linux --- .github/workflows/build-linux.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-linux.yml b/.github/workflows/build-linux.yml index c801bb275..129b5aaaf 100644 --- a/.github/workflows/build-linux.yml +++ b/.github/workflows/build-linux.yml @@ -1,6 +1,6 @@ name: Linux CI -on: [push, pull_request] +on: [pull_request] jobs: build: From a8aac45fd6dfa53c11301ec32ebb56f0601a91fd Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 3 Mar 2023 12:48:52 -0500 Subject: [PATCH 25/29] create shared library for special builds to save memory --- .github/scripts/unix.sh | 1 + .github/workflows/build-special.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/scripts/unix.sh b/.github/scripts/unix.sh index 
af9ac8991..c48921504 100644 --- a/.github/scripts/unix.sh +++ b/.github/scripts/unix.sh @@ -72,6 +72,7 @@ function configure() -DGTSAM_POSE3_EXPMAP=${GTSAM_POSE3_EXPMAP:-ON} \ -DGTSAM_USE_SYSTEM_EIGEN=${GTSAM_USE_SYSTEM_EIGEN:-OFF} \ -DGTSAM_USE_SYSTEM_METIS=${GTSAM_USE_SYSTEM_METIS:-OFF} \ + -DGTSAM_FORCE_SHARED_LIB=${GTSAM_FORCE_SHARED_LIB:-OFF} \ -DGTSAM_BUILD_WITH_MARCH_NATIVE=OFF \ -DGTSAM_SINGLE_TEST_EXE=OFF } diff --git a/.github/workflows/build-special.yml b/.github/workflows/build-special.yml index fda1c2b91..0d1cd5bdd 100644 --- a/.github/workflows/build-special.yml +++ b/.github/workflows/build-special.yml @@ -12,6 +12,7 @@ jobs: CTEST_PARALLEL_LEVEL: 2 CMAKE_BUILD_TYPE: ${{ matrix.build_type }} GTSAM_BUILD_UNSTABLE: ON + GTSAM_FORCE_SHARED_LIB: ON # Make shared library to save memory on CI strategy: fail-fast: false From 9211dddf6cdc45d9d52de9c06344ef3cedcba3ad Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 4 Mar 2023 11:50:41 -0500 Subject: [PATCH 26/29] improve the mixture factor handling so it uses the factor directly --- gtsam/hybrid/tests/testHybridEstimation.cpp | 70 ++++++++++----------- 1 file changed, 34 insertions(+), 36 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index f41bee103..836b5cb23 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -508,47 +508,40 @@ TEST(HybridEstimation, CorrectnessViaSampling) { * Helper function to add the constant term corresponding to * the difference in noise models. */ -std::shared_ptr addConstantTerm( - const HybridGaussianFactorGraph& gfg, const Key& mode, double noise_tight, - double noise_loose, size_t d, size_t tight_index) { - HybridGaussianFactorGraph updated_gfg; +std::shared_ptr mixedVarianceFactor( + const MixtureFactor& mf, const Values& initial, const Key& mode, + double noise_tight, double noise_loose, size_t d, size_t tight_index) { + GaussianMixtureFactor::shared_ptr gmf = mf.linearize(initial); constexpr double log2pi = 1.8378770664093454835606594728112; // logConstant will be of the tighter model double logNormalizationConstant = log(1.0 / noise_tight); double logConstant = -0.5 * d * log2pi + logNormalizationConstant; - for (auto&& f : gfg) { - if (auto gmf = dynamic_pointer_cast(f)) { - auto func = [&](const Assignment& assignment, - const GaussianFactor::shared_ptr& gf) { - if (assignment.at(mode) != tight_index) { - double factor_log_constant = - -0.5 * d * log2pi + log(1.0 / noise_loose); + auto func = [&](const Assignment& assignment, + const GaussianFactor::shared_ptr& gf) { + if (assignment.at(mode) != tight_index) { + double factor_log_constant = -0.5 * d * log2pi + log(1.0 / noise_loose); - GaussianFactorGraph gfg_; - gfg_.push_back(gf); - Vector c(d); - for (size_t i = 0; i < d; i++) { - c(i) = std::sqrt(2.0 * (logConstant - factor_log_constant)); - } + GaussianFactorGraph _gfg; + _gfg.push_back(gf); + Vector c(d); + for (size_t i = 0; i < d; i++) { + c(i) = std::sqrt(2.0 * (logConstant - factor_log_constant)); + } - auto constantFactor = std::make_shared(c); - gfg_.push_back(constantFactor); - return std::make_shared(gfg_); - } else { - return dynamic_pointer_cast(gf); - } - }; - auto updated_factors = gmf->factors().apply(func); - auto updated_gmf = std::make_shared( - gmf->continuousKeys(), gmf->discreteKeys(), updated_factors); - updated_gfg.add(updated_gmf); + auto constantFactor = std::make_shared(c); + _gfg.push_back(constantFactor); + return 
std::make_shared(_gfg); } else { - updated_gfg.add(f); + return dynamic_pointer_cast(gf); } - } - return std::make_shared(updated_gfg); + }; + auto updated_components = gmf->factors().apply(func); + auto updated_gmf = std::make_shared( + gmf->continuousKeys(), gmf->discreteKeys(), updated_components); + + return updated_gmf; } /****************************************************************************/ @@ -577,14 +570,16 @@ TEST(HybridEstimation, ModeSelection) { std::vector components = {model0, model1}; KeyVector keys = {X(0), X(1)}; - graph.emplace_shared(keys, modes, components); + MixtureFactor mf(keys, modes, components); initial.insert(X(0), 0.0); initial.insert(X(1), 0.0); - auto gfg = graph.linearize(initial); + auto gmf = + mixedVarianceFactor(mf, initial, M(0), noise_tight, noise_loose, d, 1); + graph.add(gmf); - gfg = addConstantTerm(*gfg, M(0), noise_tight, noise_loose, d, 1); + auto gfg = graph.linearize(initial); HybridBayesNet::shared_ptr bayesNet = gfg->eliminateSequential(); @@ -673,13 +668,16 @@ TEST(HybridEstimation, ModeSelection2) { std::vector components = {model0, model1}; KeyVector keys = {X(0), X(1)}; - graph.emplace_shared(keys, modes, components); + MixtureFactor mf(keys, modes, components); initial.insert(X(0), Z_3x1); initial.insert(X(1), Z_3x1); + auto gmf = + mixedVarianceFactor(mf, initial, M(0), noise_tight, noise_loose, d, 1); + graph.add(gmf); + auto gfg = graph.linearize(initial); - gfg = addConstantTerm(*gfg, M(0), noise_tight, noise_loose, d, 1); HybridBayesNet::shared_ptr bayesNet = gfg->eliminateSequential(); From 3f89c7af4988cbf4c26c3b1cb05f9856b77e9d5e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 4 Mar 2023 11:50:51 -0500 Subject: [PATCH 27/29] fix typo --- gtsam/hybrid/HybridSmoother.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp index 549c71714..43e75770d 100644 --- a/gtsam/hybrid/HybridSmoother.cpp +++ b/gtsam/hybrid/HybridSmoother.cpp @@ -45,7 +45,7 @@ Ordering HybridSmoother::getOrdering( std::copy(allDiscrete.begin(), allDiscrete.end(), std::back_inserter(newKeysDiscreteLast)); - const VariableIndex index(newFactors); + const VariableIndex index(factors); // Get an ordering where the new keys are eliminated last Ordering ordering = Ordering::ColamdConstrainedLast( From 28217ea7679ce75f4910c7dd89f113a83a11c41e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 4 Mar 2023 20:28:28 -0500 Subject: [PATCH 28/29] some fixes --- gtsam/hybrid/GaussianMixtureFactor.h | 2 +- gtsam/hybrid/tests/testHybridEstimation.cpp | 13 ++----------- 2 files changed, 3 insertions(+), 12 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 8e1a95e9d..1325cfe93 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -144,7 +144,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { double error(const HybridValues &values) const override; /// Getter for GaussianFactor decision tree - Factors factors() const { return factors_; } + const Factors &factors() const { return factors_; } /// Add MixtureFactor to a Sum, syntactic sugar. 
friend GaussianFactorGraphTree &operator+=( diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 836b5cb23..e74990fe6 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -530,8 +530,7 @@ std::shared_ptr mixedVarianceFactor( c(i) = std::sqrt(2.0 * (logConstant - factor_log_constant)); } - auto constantFactor = std::make_shared(c); - _gfg.push_back(constantFactor); + _gfg.emplace_shared(c); return std::make_shared(_gfg); } else { return dynamic_pointer_cast(gf); @@ -607,12 +606,6 @@ TEST(HybridEstimation, ModeSelection) { auto fg = bn.toFactorGraph(vv); auto expected_posterior = fg.eliminateSequential(); - // graph.print(); - // gfg->print("Original Gaussian Factor Graph:"); - // fg.print("\n\nFrom Bayes Net"); - - // bayesNet->print("\n\nBayes Net from FG"); - // expected_posterior->print("\n\n"); EXPECT(assert_equal(*expected_posterior, *bayesNet, 1e-6)); } @@ -644,9 +637,8 @@ TEST(HybridEstimation, ModeSelection2) { auto fg = bn.toFactorGraph(vv); auto expected_posterior = fg.eliminateSequential(); - // expected_posterior->print("\n\n\nLikelihood BN:"); - // std::cout << "\n\n==================\n\n" << std::endl; + // ===================================== HybridNonlinearFactorGraph graph; Values initial; @@ -681,7 +673,6 @@ TEST(HybridEstimation, ModeSelection2) { HybridBayesNet::shared_ptr bayesNet = gfg->eliminateSequential(); - // bayesNet->print(); EXPECT(assert_equal(*expected_posterior, *bayesNet, 1e-6)); } From 31289ebda293a8e8e34c7a2bf32a0363325d366d Mon Sep 17 00:00:00 2001 From: Jose Luis Blanco-Claraco Date: Mon, 6 Mar 2023 00:58:23 +0100 Subject: [PATCH 29/29] Update CodingGuidelines.lyx Integrated upstream from https://salsa.debian.org/science-team/gtsam/-/blob/master/debian/patches/0001-Added-forgotten-usepackage-in-CodingGuidelines.lyx.patch Original author: @dkogan --- doc/CodingGuidelines.lyx | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/CodingGuidelines.lyx b/doc/CodingGuidelines.lyx index ce083df6c..caaf27d8e 100644 --- a/doc/CodingGuidelines.lyx +++ b/doc/CodingGuidelines.lyx @@ -5,6 +5,7 @@ \textclass article \begin_preamble \usepackage{color} +\usepackage{listings} \definecolor{mygreen}{rgb}{0,0.6,0} \definecolor{mygray}{rgb}{0.5,0.5,0.5}
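
The constant vector c introduced in mixedVarianceFactor (patches 26 and 28 above) compensates for the gap between the log-normalization constants of the tight and loose noise models, so that both mixture components are evaluated on a comparable scale. Below is a minimal standalone C++ sketch of that arithmetic only; the sigma values are hypothetical examples rather than the ones used in the tests, and isotropic d-dimensional noise models are assumed.

#include <cmath>
#include <iostream>

int main() {
  const double noise_tight = 0.5, noise_loose = 5.0;  // hypothetical sigmas
  const int d = 3;                                     // measurement dimension

  // Log-constants as computed in the helper; the 2*pi term cancels in the
  // difference, so only the log(1/sigma) parts matter here.
  const double log2pi = 1.8378770664093454835606594728112;
  const double logConstant = -0.5 * d * log2pi + std::log(1.0 / noise_tight);
  const double factor_log_constant =
      -0.5 * d * log2pi + std::log(1.0 / noise_loose);

  // Each entry c(i) contributes 0.5 * c(i)^2 to the factor error, so d equal
  // entries add d * (logConstant - factor_log_constant) in total.
  const double ci = std::sqrt(2.0 * (logConstant - factor_log_constant));
  const double added_error = 0.5 * d * ci * ci;

  // This matches the difference of the full d-dimensional normalizers,
  // d * log(noise_loose / noise_tight), which is exactly what the loose
  // component lacks relative to the tight one.
  const double normalizer_gap = d * std::log(noise_loose / noise_tight);

  std::cout << "added error    = " << added_error << "\n"
            << "normalizer gap = " << normalizer_gap << "\n";
  return 0;
}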