diff --git a/gtsam_unstable/CMakeLists.txt b/gtsam_unstable/CMakeLists.txt
index be5171049..8b366c805 100644
--- a/gtsam_unstable/CMakeLists.txt
+++ b/gtsam_unstable/CMakeLists.txt
@@ -17,10 +17,6 @@ add_custom_target(check.unstable COMMAND ${CMAKE_CTEST_COMMAND} --output-on-fail
 # Add the full name to this list, as in the following example
 # Sources to remove from builds
 set (excluded_sources # "")
-    "${CMAKE_CURRENT_SOURCE_DIR}/nonlinear/ConcurrentIncrementalFilter.cpp"
-    "${CMAKE_CURRENT_SOURCE_DIR}/nonlinear/ConcurrentIncrementalSmoother.cpp"
-    "${CMAKE_CURRENT_SOURCE_DIR}/nonlinear/IncrementalFixedLagSmoother.cpp"
-    "${CMAKE_CURRENT_SOURCE_DIR}/slam/serialization.cpp"
 )
diff --git a/gtsam_unstable/examples/ConcurrentFilteringAndSmoothingExample.cpp b/gtsam_unstable/examples/ConcurrentFilteringAndSmoothingExample.cpp
index 65fc681c4..4e6e1a524 100644
--- a/gtsam_unstable/examples/ConcurrentFilteringAndSmoothingExample.cpp
+++ b/gtsam_unstable/examples/ConcurrentFilteringAndSmoothingExample.cpp
@@ -49,7 +49,7 @@
 #include
 
 // We will use simple integer Keys to uniquely identify each robot pose.
-#include
+#include
 
 // We will use Pose2 variables (x, y, theta) to represent the robot positions
 #include
@@ -86,7 +86,7 @@ int main(int argc, char** argv) {
   Pose2 priorMean(0.0, 0.0, 0.0); // prior at origin
   noiseModel::Diagonal::shared_ptr priorNoise = noiseModel::Diagonal::Sigmas(Vector_(3, 0.3, 0.3, 0.1));
   Key priorKey = 0;
-  newFactors.add(PriorFactor(priorKey, priorMean, priorNoise));
+  newFactors.push_back(PriorFactor(priorKey, priorMean, priorNoise));
   newValues.insert(priorKey, priorMean); // Initialize the first pose at the mean of the prior
   newTimestamps[priorKey] = 0.0; // Set the timestamp associated with this key to 0.0 seconds;
@@ -111,11 +111,11 @@ int main(int argc, char** argv) {
     // Add odometry factors from two different sources with different error stats
     Pose2 odometryMeasurement1 = Pose2(0.61, -0.08, 0.02);
     noiseModel::Diagonal::shared_ptr odometryNoise1 = noiseModel::Diagonal::Sigmas(Vector_(3, 0.1, 0.1, 0.05));
-    newFactors.add(BetweenFactor(previousKey, currentKey, odometryMeasurement1, odometryNoise1));
+    newFactors.push_back(BetweenFactor(previousKey, currentKey, odometryMeasurement1, odometryNoise1));
 
     Pose2 odometryMeasurement2 = Pose2(0.47, 0.03, 0.01);
     noiseModel::Diagonal::shared_ptr odometryNoise2 = noiseModel::Diagonal::Sigmas(Vector_(3, 0.05, 0.05, 0.05));
-    newFactors.add(BetweenFactor(previousKey, currentKey, odometryMeasurement2, odometryNoise2));
+    newFactors.push_back(BetweenFactor(previousKey, currentKey, odometryMeasurement2, odometryNoise2));
 
     // Unlike the fixed-lag versions, the concurrent filter implementation
     // requires the user to supply the specify which keys to move to the smoother
@@ -190,11 +190,11 @@ int main(int argc, char** argv) {
     // Add odometry factors from two different sources with different error stats
     Pose2 odometryMeasurement1 = Pose2(0.61, -0.08, 0.02);
     noiseModel::Diagonal::shared_ptr odometryNoise1 = noiseModel::Diagonal::Sigmas(Vector_(3, 0.1, 0.1, 0.05));
-    newFactors.add(BetweenFactor(previousKey, currentKey, odometryMeasurement1, odometryNoise1));
+    newFactors.push_back(BetweenFactor(previousKey, currentKey, odometryMeasurement1, odometryNoise1));
 
     Pose2 odometryMeasurement2 = Pose2(0.47, 0.03, 0.01);
     noiseModel::Diagonal::shared_ptr odometryNoise2 = noiseModel::Diagonal::Sigmas(Vector_(3, 0.05, 0.05, 0.05));
-    newFactors.add(BetweenFactor(previousKey, currentKey, odometryMeasurement2, odometryNoise2));
+    newFactors.push_back(BetweenFactor(previousKey, currentKey, odometryMeasurement2, odometryNoise2));
 
     // Unlike the fixed-lag versions, the concurrent filter implementation
     // requires the user to supply the specify which keys to marginalize
@@ -263,11 +263,11 @@ int main(int argc, char** argv) {
     // Add odometry factors from two different sources with different error stats
     Pose2 odometryMeasurement1 = Pose2(0.61, -0.08, 0.02);
     noiseModel::Diagonal::shared_ptr odometryNoise1 = noiseModel::Diagonal::Sigmas(Vector_(3, 0.1, 0.1, 0.05));
-    newFactors.add(BetweenFactor(previousKey, currentKey, odometryMeasurement1, odometryNoise1));
+    newFactors.push_back(BetweenFactor(previousKey, currentKey, odometryMeasurement1, odometryNoise1));
 
     Pose2 odometryMeasurement2 = Pose2(0.47, 0.03, 0.01);
     noiseModel::Diagonal::shared_ptr odometryNoise2 = noiseModel::Diagonal::Sigmas(Vector_(3, 0.05, 0.05, 0.05));
-    newFactors.add(BetweenFactor(previousKey, currentKey, odometryMeasurement2, odometryNoise2));
+    newFactors.push_back(BetweenFactor(previousKey, currentKey, odometryMeasurement2, odometryNoise2));
 
     // Unlike the fixed-lag versions, the concurrent filter implementation
     // requires the user to supply the specify which keys to marginalize
diff --git a/gtsam_unstable/examples/FixedLagSmootherExample.cpp b/gtsam_unstable/examples/FixedLagSmootherExample.cpp
index 1fcc00c73..41fbcc89c 100644
--- a/gtsam_unstable/examples/FixedLagSmootherExample.cpp
+++ b/gtsam_unstable/examples/FixedLagSmootherExample.cpp
@@ -80,7 +80,7 @@ int main(int argc, char** argv) {
   Pose2 priorMean(0.0, 0.0, 0.0); // prior at origin
   noiseModel::Diagonal::shared_ptr priorNoise = noiseModel::Diagonal::Sigmas(Vector_(3, 0.3, 0.3, 0.1));
   Key priorKey = 0;
-  newFactors.add(PriorFactor(priorKey, priorMean, priorNoise));
+  newFactors.push_back(PriorFactor(priorKey, priorMean, priorNoise));
   newValues.insert(priorKey, priorMean); // Initialize the first pose at the mean of the prior
   newTimestamps[priorKey] = 0.0; // Set the timestamp associated with this key to 0.0 seconds;
@@ -105,11 +105,11 @@ int main(int argc, char** argv) {
     // Add odometry factors from two different sources with different error stats
     Pose2 odometryMeasurement1 = Pose2(0.61, -0.08, 0.02);
     noiseModel::Diagonal::shared_ptr odometryNoise1 = noiseModel::Diagonal::Sigmas(Vector_(3, 0.1, 0.1, 0.05));
-    newFactors.add(BetweenFactor(previousKey, currentKey, odometryMeasurement1, odometryNoise1));
+    newFactors.push_back(BetweenFactor(previousKey, currentKey, odometryMeasurement1, odometryNoise1));
 
     Pose2 odometryMeasurement2 = Pose2(0.47, 0.03, 0.01);
     noiseModel::Diagonal::shared_ptr odometryNoise2 = noiseModel::Diagonal::Sigmas(Vector_(3, 0.05, 0.05, 0.05));
-    newFactors.add(BetweenFactor(previousKey, currentKey, odometryMeasurement2, odometryNoise2));
+    newFactors.push_back(BetweenFactor(previousKey, currentKey, odometryMeasurement2, odometryNoise2));
 
     // Update the smoothers with the new factors
     smootherBatch.update(newFactors, newValues, newTimestamps);
diff --git a/gtsam_unstable/examples/SmartRangeExample_plaza1.cpp b/gtsam_unstable/examples/SmartRangeExample_plaza1.cpp
index ef56ceafb..7a7a921f7 100644
--- a/gtsam_unstable/examples/SmartRangeExample_plaza1.cpp
+++ b/gtsam_unstable/examples/SmartRangeExample_plaza1.cpp
@@ -129,7 +129,7 @@ int main(int argc, char** argv) {
   Pose2 pose0 = Pose2(-34.2086489999201, 45.3007639991120, M_PI - 2.02108900000000);
   NonlinearFactorGraph newFactors;
-  newFactors.add(PriorFactor(0, pose0, priorNoise));
+  newFactors.push_back(PriorFactor(0, pose0, priorNoise));
 
   ofstream os2(
       "/Users/dellaert/borg/gtsam/gtsam_unstable/examples/rangeResultLM.txt");
@@ -154,7 +154,7 @@ int main(int argc, char** argv) {
   if (smart) {
     BOOST_FOREACH(size_t jj,ids) {
       smartFactors[jj] = SmartPtr(new SmartRangeFactor(sigmaR));
-      newFactors.add(smartFactors[jj]);
+      newFactors.push_back(smartFactors[jj]);
     }
   }
@@ -173,7 +173,7 @@ int main(int argc, char** argv) {
     boost::tie(t, odometry) = timedOdometry;
 
     // add odometry factor
-    newFactors.add(
+    newFactors.push_back(
         BetweenFactor(i - 1, i, odometry,
             NM::Diagonal::Sigmas(odoSigmas)));
@@ -198,7 +198,7 @@ int main(int argc, char** argv) {
       // Throw out obvious outliers based on current landmark estimates
       Vector error = factor.unwhitenedError(landmarkEstimates);
       if (k <= 200 || fabs(error[0]) < 5)
-        newFactors.add(factor);
+        newFactors.push_back(factor);
     }
     totalCount += 1;
   }
diff --git a/gtsam_unstable/examples/SmartRangeExample_plaza2.cpp b/gtsam_unstable/examples/SmartRangeExample_plaza2.cpp
index f52134c22..5291d2f88 100644
--- a/gtsam_unstable/examples/SmartRangeExample_plaza2.cpp
+++ b/gtsam_unstable/examples/SmartRangeExample_plaza2.cpp
@@ -124,7 +124,7 @@ int main(int argc, char** argv) {
   // Add prior on first pose
   Pose2 pose0 = Pose2(-34.2086489999201, 45.3007639991120, -2.02108900000000);
   NonlinearFactorGraph newFactors;
-  newFactors.add(PriorFactor(0, pose0, priorNoise));
+  newFactors.push_back(PriorFactor(0, pose0, priorNoise));
 
   // initialize points (Gaussian)
   Values initial;
@@ -150,7 +150,7 @@ int main(int argc, char** argv) {
     boost::tie(t, odometry) = timedOdometry;
 
     // add odometry factor
-    newFactors.add(
+    newFactors.push_back(
        BetweenFactor(i - 1, i, odometry,
            NM::Diagonal::Sigmas(odoSigmas)));
@@ -168,7 +168,7 @@ int main(int argc, char** argv) {
      // Throw out obvious outliers based on current landmark estimates
      Vector error = factor.unwhitenedError(landmarkEstimates);
      if (k <= 200 || fabs(error[0]) < 5)
-       newFactors.add(factor);
+       newFactors.push_back(factor);
      k = k + 1;
      countK = countK + 1;
    }
diff --git a/gtsam_unstable/nonlinear/CMakeLists.txt b/gtsam_unstable/nonlinear/CMakeLists.txt
index 875105418..a27a01ec8 100644
--- a/gtsam_unstable/nonlinear/CMakeLists.txt
+++ b/gtsam_unstable/nonlinear/CMakeLists.txt
@@ -20,10 +20,8 @@ ${gtsam_unstable-default})
 
 # Exclude tests that don't work
 set (nonlinear_excluded_tests #"")
-"${CMAKE_CURRENT_SOURCE_DIR}/tests/testConcurrentIncrementalFilter.cpp"
-"${CMAKE_CURRENT_SOURCE_DIR}/tests/testConcurrentIncrementalSmootherDL.cpp"
-"${CMAKE_CURRENT_SOURCE_DIR}/tests/testConcurrentIncrementalSmootherGN.cpp"
-"${CMAKE_CURRENT_SOURCE_DIR}/tests/testIncrementalFixedLagSmoother.cpp"
+"${CMAKE_CURRENT_SOURCE_DIR}/tests/testConcurrentIncrementalFilter.cpp"
+#"${CMAKE_CURRENT_SOURCE_DIR}/tests/testIncrementalFixedLagSmoother.cpp"
 )
diff --git a/gtsam_unstable/nonlinear/FixedLagSmoother.h b/gtsam_unstable/nonlinear/FixedLagSmoother.h
index 686cd239f..f212b38e7 100644
--- a/gtsam_unstable/nonlinear/FixedLagSmoother.h
+++ b/gtsam_unstable/nonlinear/FixedLagSmoother.h
@@ -61,10 +61,10 @@ public:
 
   /** default constructor */
-  FixedLagSmoother(double smootherLag = 0.0) : smootherLag_(smootherLag) { };
+  FixedLagSmoother(double smootherLag = 0.0) : smootherLag_(smootherLag) { }
 
   /** destructor */
-  virtual ~FixedLagSmoother() { };
+  virtual ~FixedLagSmoother() { }
 
   /** Print the factor for debugging and testing (implementing Testable) */
   virtual void print(const std::string& s = "FixedLagSmoother:\n", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
diff --git a/gtsam_unstable/nonlinear/IncrementalFixedLagSmoother.h b/gtsam_unstable/nonlinear/IncrementalFixedLagSmoother.h
index ceb15f1f1..9f015ef19 100644
--- a/gtsam_unstable/nonlinear/IncrementalFixedLagSmoother.h
+++ b/gtsam_unstable/nonlinear/IncrementalFixedLagSmoother.h
@@ -39,11 +39,11 @@ public:
 
   typedef boost::shared_ptr shared_ptr;
 
   /** default constructor */
-  IncrementalFixedLagSmoother(double smootherLag = 0.0, const ISAM2Params& parameters = ISAM2Params()) :
-      FixedLagSmoother(smootherLag), isam_(parameters) { };
+  IncrementalFixedLagSmoother(double smootherLag = 0.0, const ISAM2Params& parameters = ISAM2Params())
+      : FixedLagSmoother(smootherLag), isam_(parameters) {}
 
   /** destructor */
-  virtual ~IncrementalFixedLagSmoother() { };
+  virtual ~IncrementalFixedLagSmoother() {}
 
   /** Print the factor for debugging and testing (implementing Testable) */
   virtual void print(const std::string& s = "IncrementalFixedLagSmoother:\n", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
diff --git a/gtsam_unstable/nonlinear/tests/testConcurrentIncrementalSmootherDL.cpp b/gtsam_unstable/nonlinear/tests/testConcurrentIncrementalSmootherDL.cpp
index c02dbea18..03f322ec6 100644
--- a/gtsam_unstable/nonlinear/tests/testConcurrentIncrementalSmootherDL.cpp
+++ b/gtsam_unstable/nonlinear/tests/testConcurrentIncrementalSmootherDL.cpp
@@ -23,10 +23,10 @@
 #include
 #include
 #include
-#include
 #include
 #include
-#include
+#include
+#include
 #include
 #include
 #include
@@ -463,7 +463,7 @@ TEST( ConcurrentIncrementalSmootherDL, synchronize_2 )
   filterSeparatorValues.insert(1, Pose3().compose(poseError));
   filterSeparatorValues.insert(2, filterSeparatorValues.at(1).compose(poseOdometry).compose(poseError));
   filterSumarization.push_back(LinearContainerFactor(PriorFactor(1, poseInitial, noisePrior).linearize(filterSeparatorValues), filterSeparatorValues));
-  filterSumarization.push_back(LinearContainerFactor(BetweenFactor(1, 2, poseOdometry, noiseOdometery).linearize(filterSeparatorValues,), filterSeparatorValues));
+  filterSumarization.push_back(LinearContainerFactor(BetweenFactor(1, 2, poseOdometry, noiseOdometery).linearize(filterSeparatorValues), filterSeparatorValues));
   smootherFactors.push_back(BetweenFactor(2, 3, poseOdometry, noiseOdometery));
   smootherFactors.push_back(BetweenFactor(3, 4, poseOdometry, noiseOdometery));
   smootherValues.insert(3, filterSeparatorValues.at(2).compose(poseOdometry).compose(poseError));
@@ -573,23 +573,21 @@ TEST( ConcurrentIncrementalSmootherDL, synchronize_3 )
   // Check the optimized value of smoother state
   NonlinearFactorGraph allFactors = smootherFactors;
   Values allValues = smoother.getLinearizationPoint();
-  ordering = smoother.getOrdering(); // I'm really hoping this is an acceptable ordering...
 
-  GaussianFactorGraph::shared_ptr LinFactorGraph = allFactors.linearize(allValues, ordering);
+  GaussianFactorGraph::shared_ptr LinFactorGraph = allFactors.linearize(allValues);
 
 //  GaussianSequentialSolver GSS = GaussianSequentialSolver(*LinFactorGraph);
 //  GaussianBayesNet::shared_ptr GBNsptr = GSS.eliminate();
 
   FastSet allkeys = LinFactorGraph->keys();
   BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, filterSeparatorValues) {
-    Index index = ordering.at(key_value.key);
-    allkeys.erase(index);
+    allkeys.erase(key_value.key);
   }
   std::vector variables(allkeys.begin(), allkeys.end());
-  std::pair result = LinFactorGraph->eliminate(variables, EliminateCholesky);
+  std::pair result = LinFactorGraph->eliminatePartialSequential(variables, EliminateCholesky);
 
   expectedSmootherSummarization.resize(0);
-  BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, result.second) {
-    expectedSmootherSummarization.push_back(LinearContainerFactor(factor, ordering, allValues));
+  BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, *result.second) {
+    expectedSmootherSummarization.push_back(LinearContainerFactor(factor, allValues));
   }
 
   CHECK(assert_equal(expectedSmootherSummarization, actualSmootherSummarization, 1e-6));
diff --git a/gtsam_unstable/nonlinear/tests/testConcurrentIncrementalSmootherGN.cpp b/gtsam_unstable/nonlinear/tests/testConcurrentIncrementalSmootherGN.cpp
index 443273479..8de7dc56d 100644
--- a/gtsam_unstable/nonlinear/tests/testConcurrentIncrementalSmootherGN.cpp
+++ b/gtsam_unstable/nonlinear/tests/testConcurrentIncrementalSmootherGN.cpp
@@ -399,8 +399,8 @@ TEST( ConcurrentIncrementalSmootherGN, synchronize_1 )
   Ordering ordering;
   ordering.push_back(1);
   ordering.push_back(2);
-  filterSumarization.push_back(LinearContainerFactor(PriorFactor(1, poseInitial, noisePrior).linearize(filterSeparatorValues, ordering), ordering, filterSeparatorValues));
-  filterSumarization.push_back(LinearContainerFactor(BetweenFactor(1, 2, poseOdometry, noiseOdometery).linearize(filterSeparatorValues, ordering), ordering, filterSeparatorValues));
+  filterSumarization.push_back(LinearContainerFactor(PriorFactor(1, poseInitial, noisePrior).linearize(filterSeparatorValues), filterSeparatorValues));
+  filterSumarization.push_back(LinearContainerFactor(BetweenFactor(1, 2, poseOdometry, noiseOdometery).linearize(filterSeparatorValues), filterSeparatorValues));
 
   // Create expected values: the smoother output will be empty for this case
   NonlinearFactorGraph expectedSmootherSummarization;
@@ -582,16 +582,14 @@ TEST( ConcurrentIncrementalSmootherGN, synchronize_3 )
 //  GaussianBayesNet::shared_ptr GBNsptr = GSS.eliminate();
 
   FastSet allkeys = LinFactorGraph->keys();
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, filterSeparatorValues) {
-    Index index = ordering.at(key_value.key);
-    allkeys.erase(index);
-  }
+  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, filterSeparatorValues)
+    allkeys.erase(key_value.key);
   std::vector variables(allkeys.begin(), allkeys.end());
-  std::pair result = LinFactorGraph->eliminate(variables, EliminateCholesky);
+  std::pair result = LinFactorGraph->eliminatePartialSequential(variables, EliminateCholesky);
 
   expectedSmootherSummarization.resize(0);
-  BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, result.second) {
-    expectedSmootherSummarization.push_back(LinearContainerFactor(factor, ordering, allValues));
+  BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, *result.second) {
+    expectedSmootherSummarization.push_back(LinearContainerFactor(factor, allValues));
   }
 
   CHECK(assert_equal(expectedSmootherSummarization, actualSmootherSummarization, 1e-6));
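For reference, the sketch below pulls the pattern this patch migrates to into one self-contained snippet: factors are appended to a NonlinearFactorGraph with push_back() rather than add(), and a fixed-lag smoother is driven with update(newFactors, newValues, newTimestamps) as in FixedLagSmootherExample.cpp. It is an illustration only, not part of the diff; the include paths, the Pose2 template arguments, the 2.0-second lag, and the calculateEstimate() call are assumptions based on the touched files, while the Vector_ sigmas and the update() call are copied from the example's context lines.

```cpp
#include <gtsam/geometry/Pose2.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/slam/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam_unstable/nonlinear/BatchFixedLagSmoother.h>

using namespace gtsam;

int main() {
  // New factors, initial guesses, and per-key timestamps for one update step
  NonlinearFactorGraph newFactors;
  Values newValues;
  FixedLagSmoother::KeyTimestampMap newTimestamps;

  // Prior on the first pose, appended with push_back() (the add() spelling is
  // what this patch replaces throughout the examples)
  Pose2 priorMean(0.0, 0.0, 0.0);
  noiseModel::Diagonal::shared_ptr priorNoise =
      noiseModel::Diagonal::Sigmas(Vector_(3, 0.3, 0.3, 0.1));
  Key priorKey = 0;
  newFactors.push_back(PriorFactor<Pose2>(priorKey, priorMean, priorNoise));
  newValues.insert(priorKey, priorMean);
  newTimestamps[priorKey] = 0.0;

  // One odometry step between pose 0 and pose 1
  Key currentKey = 1;
  Pose2 odometry(0.61, -0.08, 0.02);
  noiseModel::Diagonal::shared_ptr odometryNoise =
      noiseModel::Diagonal::Sigmas(Vector_(3, 0.1, 0.1, 0.05));
  newFactors.push_back(BetweenFactor<Pose2>(priorKey, currentKey, odometry, odometryNoise));
  newValues.insert(currentKey, priorMean.compose(odometry));
  newTimestamps[currentKey] = 0.5;

  // Hand the batch to a fixed-lag smoother (2.0 s lag assumed for this sketch),
  // mirroring smootherBatch.update(newFactors, newValues, newTimestamps) above
  BatchFixedLagSmoother smoother(2.0);
  smoother.update(newFactors, newValues, newTimestamps);

  // calculateEstimate() is assumed here as the accessor for the current solution
  smoother.calculateEstimate().print("Current estimate:\n");
  return 0;
}
```

The same by-value push_back() spelling is what the updated tests rely on when they wrap freshly linearized factors in LinearContainerFactor(factor, values) without an explicit Ordering.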