diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp
index a67777b52..7b8c3faf1 100644
--- a/gtsam/hybrid/HybridSmoother.cpp
+++ b/gtsam/hybrid/HybridSmoother.cpp
@@ -24,6 +24,16 @@
 // #define DEBUG_SMOOTHER
 namespace gtsam {
 
+/* ************************************************************************* */
+void HybridSmoother::reInitialize(HybridBayesNet &&hybridBayesNet) {
+  hybridBayesNet_ = std::move(hybridBayesNet);
+}
+
+/* ************************************************************************* */
+void HybridSmoother::reInitialize(HybridBayesNet &hybridBayesNet) {
+  this->reInitialize(std::move(hybridBayesNet));
+}
+
 /* ************************************************************************* */
 Ordering HybridSmoother::getOrdering(const HybridGaussianFactorGraph &factors,
                                      const KeySet &lastKeysToEliminate) {
@@ -78,9 +88,11 @@ void HybridSmoother::update(const HybridGaussianFactorGraph &newFactors,
   // If no ordering provided, then we compute one
   if (!given_ordering.has_value()) {
     // Get the keys from the new factors
-    KeySet continuousKeysToInclude;  // Scheme 1: empty, 15sec/2000, 64sec/3000 (69s without TF)
-    // continuousKeysToInclude = newFactors.keys();  // Scheme 2: all, 8sec/2000, 160sec/3000
-    // continuousKeysToInclude = updatedGraph.keys();  // Scheme 3: all, stopped after 80sec/2000
+    KeySet continuousKeysToInclude;  // Scheme 1: empty, 15sec/2000, 64sec/3000
+                                     // (69s without TF)
+    // continuousKeysToInclude = newFactors.keys();  // Scheme 2: all,
+    // 8sec/2000, 160sec/3000 continuousKeysToInclude = updatedGraph.keys();  //
+    // Scheme 3: all, stopped after 80sec/2000
 
     // Since updatedGraph now has all the connected conditionals,
     // we can get the correct ordering.
diff --git a/gtsam/hybrid/HybridSmoother.h b/gtsam/hybrid/HybridSmoother.h
index f8eaec26c..53d058036 100644
--- a/gtsam/hybrid/HybridSmoother.h
+++ b/gtsam/hybrid/HybridSmoother.h
@@ -49,17 +49,13 @@ class GTSAM_EXPORT HybridSmoother {
   /**
    * Re-initialize the smoother from a new hybrid Bayes Net.
    */
-  void reInitialize(HybridBayesNet&& hybridBayesNet) {
-    hybridBayesNet_ = std::move(hybridBayesNet);
-  }
+  void reInitialize(HybridBayesNet&& hybridBayesNet);
 
   /**
    * Re-initialize the smoother from
    * a new hybrid Bayes Net (non rvalue version).
   */
-  void reInitialize(HybridBayesNet& hybridBayesNet) {
-    this->reInitialize(std::move(hybridBayesNet));
-  }
+  void reInitialize(HybridBayesNet& hybridBayesNet);
 
   /**
    * Given new factors, perform an incremental update.
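
For reference, the two `reInitialize` overloads moved out of the header keep their original semantics: the lvalue overload forwards to the rvalue overload, which moves the given Bayes net into the smoother's `hybridBayesNet_`. Below is a minimal usage sketch, not part of the patch; it assumes `HybridSmoother` and `HybridBayesNet` are default-constructible and uses an empty Bayes net as a stand-in for a posterior obtained from an earlier run.

```cpp
// Sketch only: calling the reInitialize overloads from this patch.
// The empty HybridBayesNet is a placeholder for a real posterior.
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridSmoother.h>

int main() {
  gtsam::HybridSmoother smoother;

  // Posterior to re-seed the smoother from (placeholder: empty Bayes net).
  gtsam::HybridBayesNet posterior;

  // Lvalue overload: forwards to the rvalue overload, so `posterior` is left
  // in a moved-from state afterwards.
  smoother.reInitialize(posterior);

  // Rvalue overload: moves the temporary Bayes net into hybridBayesNet_.
  smoother.reInitialize(gtsam::HybridBayesNet());
  return 0;
}
```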