From a3f2bddb66d107a76e8143cbc9a19f37f76265b6 Mon Sep 17 00:00:00 2001
From: Gold856 <117957790+Gold856@users.noreply.github.com>
Date: Tue, 21 Jan 2025 00:04:41 -0500
Subject: [PATCH 1/5] Fix GCC 13 build issues

---
 examples/TriangulationLOSTExample.cpp | 6 +++---
 gtsam/basis/tests/testFourier.cpp     | 4 ++++
 gtsam/geometry/BearingRange.h         | 4 +++-
 gtsam/geometry/FundamentalMatrix.cpp  | 4 ++++
 gtsam/slam/tests/testRotateFactor.cpp | 3 +++
 5 files changed, 17 insertions(+), 4 deletions(-)

diff --git a/examples/TriangulationLOSTExample.cpp b/examples/TriangulationLOSTExample.cpp
index 417c37c45..52744e508 100644
--- a/examples/TriangulationLOSTExample.cpp
+++ b/examples/TriangulationLOSTExample.cpp
@@ -123,9 +123,9 @@ int main(int argc, char* argv[]) {
   double rank_tol = 1e-9;
   std::shared_ptr calib = std::make_shared();
 
-  std::chrono::nanoseconds durationDLT;
-  std::chrono::nanoseconds durationDLTOpt;
-  std::chrono::nanoseconds durationLOST;
+  std::chrono::nanoseconds durationDLT{};
+  std::chrono::nanoseconds durationDLTOpt{};
+  std::chrono::nanoseconds durationLOST{};
 
   for (int i = 0; i < nrTrials; i++) {
     Point2Vector noisyMeasurements =
diff --git a/gtsam/basis/tests/testFourier.cpp b/gtsam/basis/tests/testFourier.cpp
index 2995e8c45..9a46e066e 100644
--- a/gtsam/basis/tests/testFourier.cpp
+++ b/gtsam/basis/tests/testFourier.cpp
@@ -22,6 +22,10 @@
 #include
 #include
 
+#if defined(__GNUC__) && !defined(__clang__)
+#pragma GCC diagnostic warning "-Wstringop-overread"
+#pragma GCC diagnostic warning "-Warray-bounds"
+#endif
 using namespace std;
 using namespace gtsam;
 
diff --git a/gtsam/geometry/BearingRange.h b/gtsam/geometry/BearingRange.h
index 9c4e42edf..9deb31b69 100644
--- a/gtsam/geometry/BearingRange.h
+++ b/gtsam/geometry/BearingRange.h
@@ -138,8 +138,10 @@ public:
   TangentVector localCoordinates(const BearingRange& other) const {
     typename traits::TangentVector v1 = traits::Local(bearing_, other.bearing_);
     typename traits::TangentVector v2 = traits::Local(range_, other.range_);
+    // Set the first dimB elements to v1, and the next dimR elements to v2
     TangentVector v;
-    v << v1, v2;
+    v.template head() = v1;
+    v.template tail() = v2;
     return v;
   }
 
diff --git a/gtsam/geometry/FundamentalMatrix.cpp b/gtsam/geometry/FundamentalMatrix.cpp
index 837ba7263..5c5e02715 100644
--- a/gtsam/geometry/FundamentalMatrix.cpp
+++ b/gtsam/geometry/FundamentalMatrix.cpp
@@ -9,6 +9,10 @@
 #include
 #include
 
+#if defined(__GNUC__) && !defined(__clang__)
+#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
+#endif
+
 namespace gtsam {
 
 //*************************************************************************
diff --git a/gtsam/slam/tests/testRotateFactor.cpp b/gtsam/slam/tests/testRotateFactor.cpp
index 68e0de0f0..b748f83ed 100644
--- a/gtsam/slam/tests/testRotateFactor.cpp
+++ b/gtsam/slam/tests/testRotateFactor.cpp
@@ -14,6 +14,9 @@
 
 #include
 
+#if defined(__GNUC__) && !defined(__clang__)
+#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
+#endif
 using namespace std;
 using namespace std::placeholders;
 using namespace gtsam;
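
Patch 1 relies on two small C++ idioms: value-initializing accumulators with {} and guarding GCC-only diagnostic pragmas so other compilers never see them. A self-contained sketch of both, using a hypothetical translation unit rather than the files touched above:

// Hypothetical stand-alone file illustrating the two fixes from patch 1.
// The #if guard keeps the GCC-specific pragma from reaching Clang or MSVC.
#if defined(__GNUC__) && !defined(__clang__)
#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
#endif

#include <chrono>

int main() {
  // Value-initialization with {} gives the accumulator a defined starting
  // value, which is what silences GCC 13's -Wmaybe-uninitialized warning.
  std::chrono::nanoseconds durationDLT{};
  for (int i = 0; i < 3; ++i) durationDLT += std::chrono::nanoseconds(1);
  return durationDLT.count() == 3 ? 0 : 1;
}
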
From d6ccb57241618d425a2a85c8b36c92ce2ed4cb2a Mon Sep 17 00:00:00 2001
From: Frank Dellaert
Date: Wed, 22 Jan 2025 23:20:41 -0500
Subject: [PATCH 2/5] Fix c++20 issue

---
 gtsam/navigation/tests/testGPSFactor.cpp | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/gtsam/navigation/tests/testGPSFactor.cpp b/gtsam/navigation/tests/testGPSFactor.cpp
index f240e5dbf..62456bb0d 100644
--- a/gtsam/navigation/tests/testGPSFactor.cpp
+++ b/gtsam/navigation/tests/testGPSFactor.cpp
@@ -38,18 +38,17 @@ static const auto& kWGS84 = Geocentric::WGS84();
 // *************************************************************************
 namespace example {
 // ENU Origin is where the plane was in hold next to runway
-const double lat0 = 33.86998, lon0 = -84.30626, h0 = 274;
+static constexpr double lat0 = 33.86998, lon0 = -84.30626, h0 = 274;
 
 // Convert from GPS to ENU
-LocalCartesian origin_ENU(lat0, lon0, h0, kWGS84);
+static const LocalCartesian origin_ENU(lat0, lon0, h0, kWGS84);
 
 // Dekalb-Peachtree Airport runway 2L
-const double lat = 33.87071, lon = -84.30482, h = 274;\
+static constexpr double lat = 33.87071, lon = -84.30482, h = 274;
 
 // Random lever arm
-const Point3 leverArm(0.1, 0.2, 0.3);
-
-}
+static const Point3 leverArm(0.1, 0.2, 0.3);
+}  // namespace example
 
 // *************************************************************************
 TEST( GPSFactor, Constructor ) {
@@ -135,7 +134,7 @@ TEST( GPSFactorArmCalib, Constructor ) {
 
   // Calculate numerical derivatives
   Matrix expectedH1 = numericalDerivative11(
-      [&factor, &leverArm](const Pose3& pose_arg) {
+      [&factor](const Pose3& pose_arg) {
         return factor.evaluateError(pose_arg, leverArm);
       },
       T);
@@ -235,7 +234,7 @@ TEST( GPSFactor2ArmCalib, Constructor ) {
 
   // Calculate numerical derivatives
   Matrix expectedH1 = numericalDerivative11(
-      [&factor, &leverArm](const NavState& nav_arg) {
+      [&factor](const NavState& nav_arg) {
         return factor.evaluateError(nav_arg, leverArm);
       },
       T);
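
The capture-list change above works because a name with static storage duration is visible inside a lambda body without being captured; only local (automatic) variables need to appear in the capture list. A minimal illustration with hypothetical constants, not the test's actual fixture:

#include <iostream>

namespace example {
// Namespace-scope constants in the style of the patched test fixture.
static constexpr double h = 274.0;
static const double offset = 0.5;
}  // namespace example

int main() {
  using namespace example;
  // No capture of h or offset is needed; the lambda body simply refers to
  // them, which is why the patch can drop &leverArm from the capture lists.
  auto shifted = [](double x) { return x + h + offset; };
  std::cout << shifted(1.0) << "\n";  // prints 275.5
  return 0;
}
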
From 2d7690dbb7c2468a86df5b63ae585a81c10d71cb Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 23 Jan 2025 20:32:49 -0500
Subject: [PATCH 3/5] update addConditionals to only use factor graph keys and remove an extra loop

---
 gtsam/hybrid/HybridSmoother.cpp | 33 ++++++++++++++-------------------
 gtsam/hybrid/HybridSmoother.h   |  2 +-
 2 files changed, 15 insertions(+), 20 deletions(-)

diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp
index ca3e27252..3e06d98ae 100644
--- a/gtsam/hybrid/HybridSmoother.cpp
+++ b/gtsam/hybrid/HybridSmoother.cpp
@@ -68,8 +68,7 @@ void HybridSmoother::update(HybridGaussianFactorGraph graph,
   }
 
   // Add the necessary conditionals from the previous timestep(s).
-  std::tie(graph, hybridBayesNet_) =
-      addConditionals(graph, hybridBayesNet_, ordering);
+  std::tie(graph, hybridBayesNet_) = addConditionals(graph, hybridBayesNet_);
 
   // Eliminate.
   HybridBayesNet bayesNetFragment = *graph.eliminateSequential(ordering);
@@ -88,10 +87,11 @@ void HybridSmoother::update(HybridGaussianFactorGraph graph,
 /* ************************************************************************* */
 std::pair
 HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
-                                const HybridBayesNet &originalHybridBayesNet,
-                                const Ordering &ordering) const {
+                                const HybridBayesNet &hybridBayesNet) const {
   HybridGaussianFactorGraph graph(originalGraph);
-  HybridBayesNet hybridBayesNet(originalHybridBayesNet);
+  HybridBayesNet updatedHybridBayesNet(hybridBayesNet);
+
+  KeySet factorKeys = graph.keys();
 
   // If hybridBayesNet is not empty,
   // it means we have conditionals to add to the factor graph.
@@ -99,10 +99,6 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
     // We add all relevant hybrid conditionals on the last continuous variable
     // in the previous `hybridBayesNet` to the graph
 
-    // Conditionals to remove from the bayes net
-    // since the conditional will be updated.
-    std::vector conditionals_to_erase;
-
     // New conditionals to add to the graph
     gtsam::HybridBayesNet newConditionals;
 
@@ -112,25 +108,24 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
       auto conditional = hybridBayesNet.at(i);
 
       for (auto &key : conditional->frontals()) {
-        if (std::find(ordering.begin(), ordering.end(), key) !=
-            ordering.end()) {
+        if (std::find(factorKeys.begin(), factorKeys.end(), key) !=
+            factorKeys.end()) {
           newConditionals.push_back(conditional);
-          conditionals_to_erase.push_back(conditional);
+
+          // Remove the conditional from the updated Bayes net
+          auto it = find(updatedHybridBayesNet.begin(),
+                         updatedHybridBayesNet.end(), conditional);
+          updatedHybridBayesNet.erase(it);
 
           break;
         }
       }
     }
-    // Remove conditionals at the end so we don't affect the order in the
-    // original bayes net.
-    for (auto &&conditional : conditionals_to_erase) {
-      auto it = find(hybridBayesNet.begin(), hybridBayesNet.end(), conditional);
-      hybridBayesNet.erase(it);
-    }
 
     graph.push_back(newConditionals);
   }
-  return {graph, hybridBayesNet};
+
+  return {graph, updatedHybridBayesNet};
 }
 
 /* ************************************************************************* */
diff --git a/gtsam/hybrid/HybridSmoother.h b/gtsam/hybrid/HybridSmoother.h
index 66edf86d6..4669b1d8f 100644
--- a/gtsam/hybrid/HybridSmoother.h
+++ b/gtsam/hybrid/HybridSmoother.h
@@ -66,7 +66,7 @@ class GTSAM_EXPORT HybridSmoother {
    */
   std::pair addConditionals(
       const HybridGaussianFactorGraph& graph,
-      const HybridBayesNet& hybridBayesNet, const Ordering& ordering) const;
+      const HybridBayesNet& hybridBayesNet) const;
 
   /**
    * @brief Get the hybrid Gaussian conditional from
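
The rewritten loop above iterates `hybridBayesNet` while erasing only from the separate copy `updatedHybridBayesNet`, so the deferred `conditionals_to_erase` pass is no longer needed: the container being iterated is never modified. A standalone STL sketch of that pattern, with plain int/std::vector stand-ins rather than GTSAM types:

#include <algorithm>
#include <iostream>
#include <set>
#include <vector>

int main() {
  // Stand-ins: conditionals become ints, the factor-graph keys become a set.
  const std::vector<int> bayesNet = {1, 2, 3, 4};
  std::vector<int> updated = bayesNet;    // copy that gets pruned
  const std::set<int> factorKeys = {2, 4};

  for (int conditional : bayesNet) {      // iterate the untouched original
    if (factorKeys.count(conditional)) {
      // Erase from the copy only; iteration over bayesNet stays valid.
      updated.erase(std::find(updated.begin(), updated.end(), conditional));
    }
  }

  for (int c : updated) std::cout << c << ' ';  // prints: 1 3
  std::cout << '\n';
  return 0;
}
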
From b4020ed67b5aa70f189cbc762a05a35b254ce52b Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 23 Jan 2025 21:04:12 -0500
Subject: [PATCH 4/5] fix up getOrdering and update to be more efficient

---
 gtsam/hybrid/HybridSmoother.cpp | 28 ++++++++++++++++------------
 gtsam/hybrid/HybridSmoother.h   | 28 ++++++++++++++++++++++++++--
 2 files changed, 42 insertions(+), 14 deletions(-)

diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp
index 3e06d98ae..3f429198e 100644
--- a/gtsam/hybrid/HybridSmoother.cpp
+++ b/gtsam/hybrid/HybridSmoother.cpp
@@ -24,17 +24,14 @@
 namespace gtsam {
 
 /* ************************************************************************* */
-Ordering HybridSmoother::getOrdering(
-    const HybridGaussianFactorGraph &newFactors) {
-  HybridGaussianFactorGraph factors(hybridBayesNet());
-  factors.push_back(newFactors);
-
+Ordering HybridSmoother::getOrdering(const HybridGaussianFactorGraph &factors,
+                                     const KeySet &newFactorKeys) {
   // Get all the discrete keys from the factors
   KeySet allDiscrete = factors.discreteKeySet();
 
   // Create KeyVector with continuous keys followed by discrete keys.
   KeyVector newKeysDiscreteLast;
-  const KeySet newFactorKeys = newFactors.keys();
+
   // Insert continuous keys first.
   for (auto &k : newFactorKeys) {
     if (!allDiscrete.exists(k)) {
@@ -56,22 +53,29 @@
 }
 
 /* ************************************************************************* */
-void HybridSmoother::update(HybridGaussianFactorGraph graph,
+void HybridSmoother::update(const HybridGaussianFactorGraph &graph,
                             std::optional maxNrLeaves,
                             const std::optional given_ordering) {
+  HybridGaussianFactorGraph updatedGraph;
+  // Add the necessary conditionals from the previous timestep(s).
+  std::tie(updatedGraph, hybridBayesNet_) =
+      addConditionals(graph, hybridBayesNet_);
+
   Ordering ordering;
   // If no ordering provided, then we compute one
   if (!given_ordering.has_value()) {
-    ordering = this->getOrdering(graph);
+    // Get the keys from the new factors
+    const KeySet newFactorKeys = graph.keys();
+
+    // Since updatedGraph now has all the connected conditionals,
+    // we can get the correct ordering.
+    ordering = this->getOrdering(updatedGraph, newFactorKeys);
   } else {
     ordering = *given_ordering;
   }
 
-  // Add the necessary conditionals from the previous timestep(s).
-  std::tie(graph, hybridBayesNet_) = addConditionals(graph, hybridBayesNet_);
-
   // Eliminate.
-  HybridBayesNet bayesNetFragment = *graph.eliminateSequential(ordering);
+  HybridBayesNet bayesNetFragment = *updatedGraph.eliminateSequential(ordering);
 
   /// Prune
   if (maxNrLeaves) {
diff --git a/gtsam/hybrid/HybridSmoother.h b/gtsam/hybrid/HybridSmoother.h
index 4669b1d8f..f941385bd 100644
--- a/gtsam/hybrid/HybridSmoother.h
+++ b/gtsam/hybrid/HybridSmoother.h
@@ -49,11 +49,35 @@ class GTSAM_EXPORT HybridSmoother {
    * @param given_ordering The (optional) ordering for elimination, only
    * continuous variables are allowed
    */
-  void update(HybridGaussianFactorGraph graph,
+  void update(const HybridGaussianFactorGraph& graph,
              std::optional maxNrLeaves = {},
              const std::optional given_ordering = {});
 
-  Ordering getOrdering(const HybridGaussianFactorGraph& newFactors);
+  /**
+   * @brief Get an elimination ordering which eliminates continuous and then
+   * discrete.
+   *
+   * Expects `newFactors` to already have the necessary conditionals connected
+   * to the
+   *
+   * @param factors
+   * @return Ordering
+   */
+
+  /**
+   * @brief Get an elimination ordering which eliminates continuous
+   * and then discrete.
+   *
+   * Expects `factors` to already have the necessary conditionals
+   * which were connected to the variables in the newly added factors.
+   * Those variables should be in `newFactorKeys`.
+   *
+   * @param factors All the new factors and connected conditionals.
+   * @param newFactorKeys The keys/variables in the newly added factors.
+   * @return Ordering
+   */
+  Ordering getOrdering(const HybridGaussianFactorGraph& factors,
+                       const KeySet& newFactorKeys);
 
   /**
    * @brief Add conditionals from previous timestep as part of liquefication.
From 8ad3cb6ba167883abbbcb058bc9ddda941357fd7 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 23 Jan 2025 21:13:39 -0500
Subject: [PATCH 5/5] update HybridSmoother tests

---
 gtsam/hybrid/tests/testHybridSmoother.cpp | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/gtsam/hybrid/tests/testHybridSmoother.cpp b/gtsam/hybrid/tests/testHybridSmoother.cpp
index 145f44d1e..5493ce53e 100644
--- a/gtsam/hybrid/tests/testHybridSmoother.cpp
+++ b/gtsam/hybrid/tests/testHybridSmoother.cpp
@@ -95,16 +95,15 @@ TEST(HybridSmoother, IncrementalSmoother) {
     initial.insert(X(k), switching.linearizationPoint.at(X(k)));
 
     HybridGaussianFactorGraph linearized = *graph.linearize(initial);
 
-    Ordering ordering = smoother.getOrdering(linearized);
-    smoother.update(linearized, maxNrLeaves, ordering);
+    smoother.update(linearized, maxNrLeaves);
 
     // Clear all the factors from the graph
    graph.resize(0);
   }
 
   EXPECT_LONGS_EQUAL(11,
-                     smoother.hybridBayesNet().at(0)->asDiscrete()->nrValues());
+                     smoother.hybridBayesNet().at(3)->asDiscrete()->nrValues());
 
   // Get the continuous delta update as well as
   // the optimal discrete assignment.
@@ -150,16 +149,15 @@ TEST(HybridSmoother, ValidPruningError) {
     initial.insert(X(k), switching.linearizationPoint.at(X(k)));
 
     HybridGaussianFactorGraph linearized = *graph.linearize(initial);
 
-    Ordering ordering = smoother.getOrdering(linearized);
-    smoother.update(linearized, maxNrLeaves, ordering);
+    smoother.update(linearized, maxNrLeaves);
 
     // Clear all the factors from the graph
     graph.resize(0);
   }
 
   EXPECT_LONGS_EQUAL(14,
-                     smoother.hybridBayesNet().at(0)->asDiscrete()->nrValues());
+                     smoother.hybridBayesNet().at(6)->asDiscrete()->nrValues());
 
   // Get the continuous delta update as well as
   // the optimal discrete assignment.
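
Taken together, patches 3-5 move the ordering computation inside HybridSmoother::update, so callers no longer call getOrdering themselves. A condensed usage sketch of the new call pattern, mirroring the updated tests above; the helper function and its parameters are illustrative, not new API:

#include <cstddef>

#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/HybridSmoother.h>
#include <gtsam/nonlinear/Values.h>

// One smoother step in the style of testHybridSmoother.cpp after patch 5.
void smootherStep(gtsam::HybridSmoother& smoother,
                  const gtsam::HybridNonlinearFactorGraph& graph,
                  const gtsam::Values& initial, std::size_t maxNrLeaves) {
  // Linearize the newly added factors, as the tests do.
  gtsam::HybridGaussianFactorGraph linearized = *graph.linearize(initial);

  // No caller-side getOrdering(): update() now adds the previous
  // conditionals first and derives the ordering from the new factor keys.
  smoother.update(linearized, maxNrLeaves);
}
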