diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp
index 8298e813a..4beaf6474 100644
--- a/gtsam/hybrid/HybridSmoother.cpp
+++ b/gtsam/hybrid/HybridSmoother.cpp
@@ -96,9 +96,17 @@ void HybridSmoother::removeFixedValues(
 }
 
 /* ************************************************************************* */
-void HybridSmoother::update(const HybridGaussianFactorGraph &newFactors,
+void HybridSmoother::update(const HybridNonlinearFactorGraph &newFactors,
+                            const Values &initial,
                             std::optional<size_t> maxNrLeaves,
                             const std::optional<Ordering> given_ordering) {
+  HybridGaussianFactorGraph linearizedFactors = *newFactors.linearize(initial);
+
+  // Record the new nonlinear factors and
+  // linearization point for relinearization
+  allFactors_.push_back(newFactors);
+  linearizationPoint_.insert_or_assign(initial);
+
   const KeySet originalNewFactorKeys = newFactors.keys();
 #ifdef DEBUG_SMOOTHER
   std::cout << "hybridBayesNet_ size before: " << hybridBayesNet_.size()
@@ -108,7 +116,7 @@ void HybridSmoother::update(const HybridGaussianFactorGraph &newFactors,
   HybridGaussianFactorGraph updatedGraph;
   // Add the necessary conditionals from the previous timestep(s).
   std::tie(updatedGraph, hybridBayesNet_) =
-      addConditionals(newFactors, hybridBayesNet_);
+      addConditionals(linearizedFactors, hybridBayesNet_);
 #ifdef DEBUG_SMOOTHER
   // print size of newFactors, updatedGraph, hybridBayesNet_
   std::cout << "updatedGraph size: " << updatedGraph.size() << std::endl;
@@ -283,4 +291,25 @@ HybridValues HybridSmoother::optimize() const {
   return HybridValues(continuous, mpe);
 }
 
+/* ************************************************************************* */
+void HybridSmoother::relinearize() {
+  allFactors_ = allFactors_.restrict(fixedValues_);
+  HybridGaussianFactorGraph::shared_ptr linearized =
+      allFactors_.linearize(linearizationPoint_);
+  HybridBayesNet::shared_ptr bayesNet = linearized->eliminateSequential();
+  HybridValues delta = bayesNet->optimize();
+  linearizationPoint_ = linearizationPoint_.retract(delta.continuous());
+  reInitialize(*bayesNet);
+}
+
+/* ************************************************************************* */
+Values HybridSmoother::linearizationPoint() const {
+  return linearizationPoint_;
+}
+
+/* ************************************************************************* */
+HybridNonlinearFactorGraph HybridSmoother::allFactors() const {
+  return allFactors_;
+}
+
 } // namespace gtsam
diff --git a/gtsam/hybrid/HybridSmoother.h b/gtsam/hybrid/HybridSmoother.h
index ef75b06b5..b45327258 100644
--- a/gtsam/hybrid/HybridSmoother.h
+++ b/gtsam/hybrid/HybridSmoother.h
@@ -19,6 +19,7 @@
 #include <gtsam/discrete/DiscreteFactorGraph.h>
 #include <gtsam/hybrid/HybridBayesNet.h>
 #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
+#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
 
 #include <optional>
 
@@ -26,8 +27,10 @@ namespace gtsam {
 class GTSAM_EXPORT HybridSmoother {
  private:
-  HybridBayesNet hybridBayesNet_;
+  HybridNonlinearFactorGraph allFactors_;
+  Values linearizationPoint_;
+  HybridBayesNet hybridBayesNet_;
 
   /// The threshold above which we make a decision about a mode.
   std::optional<double> marginalThreshold_;
   DiscreteValues fixedValues_;
@@ -73,12 +76,12 @@ class GTSAM_EXPORT HybridSmoother {
    * @param graph The new factors, should be linear only
    * @param maxNrLeaves The maximum number of leaves in the new discrete factor,
    * if applicable
-   * @param given_ordering The (optional) ordering for elimination, only
+   * @param givenOrdering The (optional) ordering for elimination, only
    * continuous variables are allowed
    */
-  void update(const HybridGaussianFactorGraph& graph,
+  void update(const HybridNonlinearFactorGraph& graph, const Values& initial,
               std::optional<size_t> maxNrLeaves = {},
-              const std::optional<Ordering> given_ordering = {});
+              const std::optional<Ordering> givenOrdering = {});
 
   /**
    * @brief Get an elimination ordering which eliminates continuous
@@ -123,6 +126,16 @@ class GTSAM_EXPORT HybridSmoother {
   /// Optimize the hybrid Bayes Net, taking into accound fixed values.
   HybridValues optimize() const;
 
+  /// Relinearize the nonlinear factor graph
+  /// with the latest linearization point.
+  void relinearize();
+
+  /// Return the current linearization point.
+  Values linearizationPoint() const;
+
+  /// Return all the recorded nonlinear factors
+  HybridNonlinearFactorGraph allFactors() const;
+
  private:
   /// Helper to compute the ordering if ordering is not given.
   Ordering maybeComputeOrdering(const HybridGaussianFactorGraph& updatedGraph,
diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i
index 5e93c85a9..f76de20be 100644
--- a/gtsam/hybrid/hybrid.i
+++ b/gtsam/hybrid/hybrid.i
@@ -283,10 +283,16 @@ class HybridSmoother {
   void reInitialize(gtsam::HybridBayesNet& hybridBayesNet);
 
   void update(
-      const gtsam::HybridGaussianFactorGraph& graph,
+      const gtsam::HybridNonlinearFactorGraph& graph,
+      const gtsam::Values& initial,
       std::optional<size_t> maxNrLeaves = std::nullopt,
       const std::optional<gtsam::Ordering> given_ordering = std::nullopt);
 
+  void relinearize();
+
+  gtsam::Values linearizationPoint() const;
+  gtsam::HybridNonlinearFactorGraph allFactors() const;
+
   gtsam::Ordering getOrdering(const gtsam::HybridGaussianFactorGraph& factors,
                               const gtsam::KeySet& newFactorKeys);
 
diff --git a/gtsam/hybrid/tests/testHybridSmoother.cpp b/gtsam/hybrid/tests/testHybridSmoother.cpp
index 0d3608e40..cc1a70010 100644
--- a/gtsam/hybrid/tests/testHybridSmoother.cpp
+++ b/gtsam/hybrid/tests/testHybridSmoother.cpp
@@ -94,9 +94,7 @@ TEST(HybridSmoother, IncrementalSmoother) {
     initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
 
-    HybridGaussianFactorGraph linearized = *graph.linearize(initial);
-
-    smoother.update(linearized, maxNrLeaves);
+    smoother.update(graph, initial, maxNrLeaves);
 
     // Clear all the factors from the graph
     graph.resize(0);
 
@@ -152,9 +150,7 @@ TEST(HybridSmoother, ValidPruningError) {
     initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
 
-    HybridGaussianFactorGraph linearized = *graph.linearize(initial);
-
-    smoother.update(linearized, maxNrLeaves);
+    smoother.update(graph, initial, maxNrLeaves);
 
     // Clear all the factors from the graph
     graph.resize(0);
 
@@ -200,9 +196,7 @@ TEST(HybridSmoother, DeadModeRemoval) {
    initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
 
-    HybridGaussianFactorGraph linearized = *graph.linearize(initial);
-
-    smoother.update(linearized, maxNrLeaves);
+    smoother.update(graph, initial, maxNrLeaves);
 
     // Clear all the factors from the graph
    graph.resize(0);
 
diff --git a/python/gtsam/examples/HybridCity10000.py b/python/gtsam/examples/HybridCity10000.py
index e1f4b9d05..17be1b267 100644
--- a/python/gtsam/examples/HybridCity10000.py
+++ b/python/gtsam/examples/HybridCity10000.py
@@ -194,7 +194,6 @@ class Experiment:
         self.smoother_ = HybridSmoother(marginal_threshold)
         self.new_factors_ = HybridNonlinearFactorGraph()
-        self.all_factors_ = HybridNonlinearFactorGraph()
         self.initial_ = Values()
 
         self.plot_hypotheses = plot_hypotheses
 
@@ -231,24 +230,18 @@ class Experiment:
         """Perform smoother update and optimize the graph."""
         print(f"Smoother update: {self.new_factors_.size()}")
         before_update = time.time()
-        linearized = self.new_factors_.linearize(self.initial_)
-        self.smoother_.update(linearized, max_num_hypotheses)
-        self.all_factors_.push_back(self.new_factors_)
+        self.smoother_.update(self.new_factors_, self.initial_,
+                              max_num_hypotheses)
         self.new_factors_.resize(0)
         after_update = time.time()
         return after_update - before_update
 
     def reinitialize(self) -> float:
         """Re-linearize, solve ALL, and re-initialize smoother."""
-        print(f"================= Re-Initialize: {self.all_factors_.size()}")
+        print(f"================= Re-Initialize: {self.smoother_.allFactors().size()}")
         before_update = time.time()
-        self.all_factors_ = self.all_factors_.restrict(
-            self.smoother_.fixedValues())
-        linearized = self.all_factors_.linearize(self.initial_)
-        bayesNet = linearized.eliminateSequential()
-        delta: HybridValues = bayesNet.optimize()
-        self.initial_ = self.initial_.retract(delta.continuous())
-        self.smoother_.reInitialize(bayesNet)
+        self.smoother_.relinearize()
+        self.initial_ = self.smoother_.linearizationPoint()
         after_update = time.time()
         print(f"Took {after_update - before_update} seconds.")
         return after_update - before_update