Fixing examples and some tests for gtsam_unstable/nonlinear. testConcurrentIncrementalFilter doesn't compile and is disabled; testIncrementalFixedLagSmoother is enabled and builds, but still fails.

release/4.3a0
Alex Cunningham 2013-08-19 15:32:08 +00:00
parent f3fdf8abe9
commit f9dcf31c2b
10 changed files with 40 additions and 50 deletions

View File

@@ -17,10 +17,6 @@ add_custom_target(check.unstable COMMAND ${CMAKE_CTEST_COMMAND} --output-on-fail
 # Add the full name to this list, as in the following example
 # Sources to remove from builds
 set (excluded_sources # "")
-"${CMAKE_CURRENT_SOURCE_DIR}/nonlinear/ConcurrentIncrementalFilter.cpp"
-"${CMAKE_CURRENT_SOURCE_DIR}/nonlinear/ConcurrentIncrementalSmoother.cpp"
-"${CMAKE_CURRENT_SOURCE_DIR}/nonlinear/IncrementalFixedLagSmoother.cpp"
-"${CMAKE_CURRENT_SOURCE_DIR}/slam/serialization.cpp"
 )
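
For orientation: an excluded_sources list like this is typically consumed by filtering the globbed sources before the library target is assembled, so de-listing a file is what re-enables its compilation. A minimal CMake sketch of that pattern, with hypothetical variable and target names rather than the actual gtsam_unstable build logic:

    # Gather the subdirectory sources (hypothetical glob)
    file(GLOB nonlinear_srcs "${CMAKE_CURRENT_SOURCE_DIR}/nonlinear/*.cpp")
    # Drop anything still listed in excluded_sources so it never reaches the target
    if(excluded_sources)
        list(REMOVE_ITEM nonlinear_srcs ${excluded_sources})
    endif()
    add_library(gtsam_unstable_nonlinear ${nonlinear_srcs})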

View File

@@ -49,7 +49,7 @@
 #include <gtsam/nonlinear/Values.h>
 // We will use simple integer Keys to uniquely identify each robot pose.
-#include <gtsam/nonlinear/Key.h>
+#include <gtsam/inference/Key.h>
 // We will use Pose2 variables (x, y, theta) to represent the robot positions
 #include <gtsam/geometry/Pose2.h>
@@ -86,7 +86,7 @@ int main(int argc, char** argv) {
 Pose2 priorMean(0.0, 0.0, 0.0); // prior at origin
 noiseModel::Diagonal::shared_ptr priorNoise = noiseModel::Diagonal::Sigmas(Vector_(3, 0.3, 0.3, 0.1));
 Key priorKey = 0;
-newFactors.add(PriorFactor<Pose2>(priorKey, priorMean, priorNoise));
+newFactors.push_back(PriorFactor<Pose2>(priorKey, priorMean, priorNoise));
 newValues.insert(priorKey, priorMean); // Initialize the first pose at the mean of the prior
 newTimestamps[priorKey] = 0.0; // Set the timestamp associated with this key to 0.0 seconds;
@@ -111,11 +111,11 @@ int main(int argc, char** argv) {
 // Add odometry factors from two different sources with different error stats
 Pose2 odometryMeasurement1 = Pose2(0.61, -0.08, 0.02);
 noiseModel::Diagonal::shared_ptr odometryNoise1 = noiseModel::Diagonal::Sigmas(Vector_(3, 0.1, 0.1, 0.05));
-newFactors.add(BetweenFactor<Pose2>(previousKey, currentKey, odometryMeasurement1, odometryNoise1));
+newFactors.push_back(BetweenFactor<Pose2>(previousKey, currentKey, odometryMeasurement1, odometryNoise1));
 Pose2 odometryMeasurement2 = Pose2(0.47, 0.03, 0.01);
 noiseModel::Diagonal::shared_ptr odometryNoise2 = noiseModel::Diagonal::Sigmas(Vector_(3, 0.05, 0.05, 0.05));
-newFactors.add(BetweenFactor<Pose2>(previousKey, currentKey, odometryMeasurement2, odometryNoise2));
+newFactors.push_back(BetweenFactor<Pose2>(previousKey, currentKey, odometryMeasurement2, odometryNoise2));
 // Unlike the fixed-lag versions, the concurrent filter implementation
 // requires the user to supply the specify which keys to move to the smoother
@@ -190,11 +190,11 @@ int main(int argc, char** argv) {
 // Add odometry factors from two different sources with different error stats
 Pose2 odometryMeasurement1 = Pose2(0.61, -0.08, 0.02);
 noiseModel::Diagonal::shared_ptr odometryNoise1 = noiseModel::Diagonal::Sigmas(Vector_(3, 0.1, 0.1, 0.05));
-newFactors.add(BetweenFactor<Pose2>(previousKey, currentKey, odometryMeasurement1, odometryNoise1));
+newFactors.push_back(BetweenFactor<Pose2>(previousKey, currentKey, odometryMeasurement1, odometryNoise1));
 Pose2 odometryMeasurement2 = Pose2(0.47, 0.03, 0.01);
 noiseModel::Diagonal::shared_ptr odometryNoise2 = noiseModel::Diagonal::Sigmas(Vector_(3, 0.05, 0.05, 0.05));
-newFactors.add(BetweenFactor<Pose2>(previousKey, currentKey, odometryMeasurement2, odometryNoise2));
+newFactors.push_back(BetweenFactor<Pose2>(previousKey, currentKey, odometryMeasurement2, odometryNoise2));
 // Unlike the fixed-lag versions, the concurrent filter implementation
 // requires the user to supply the specify which keys to marginalize
@@ -263,11 +263,11 @@ int main(int argc, char** argv) {
 // Add odometry factors from two different sources with different error stats
 Pose2 odometryMeasurement1 = Pose2(0.61, -0.08, 0.02);
 noiseModel::Diagonal::shared_ptr odometryNoise1 = noiseModel::Diagonal::Sigmas(Vector_(3, 0.1, 0.1, 0.05));
-newFactors.add(BetweenFactor<Pose2>(previousKey, currentKey, odometryMeasurement1, odometryNoise1));
+newFactors.push_back(BetweenFactor<Pose2>(previousKey, currentKey, odometryMeasurement1, odometryNoise1));
 Pose2 odometryMeasurement2 = Pose2(0.47, 0.03, 0.01);
 noiseModel::Diagonal::shared_ptr odometryNoise2 = noiseModel::Diagonal::Sigmas(Vector_(3, 0.05, 0.05, 0.05));
-newFactors.add(BetweenFactor<Pose2>(previousKey, currentKey, odometryMeasurement2, odometryNoise2));
+newFactors.push_back(BetweenFactor<Pose2>(previousKey, currentKey, odometryMeasurement2, odometryNoise2));
 // Unlike the fixed-lag versions, the concurrent filter implementation
 // requires the user to supply the specify which keys to marginalize
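
All of the example edits above track a single API change: this round of the GTSAM refactor replaced NonlinearFactorGraph::add with the generic FactorGraph push_back interface, which accepts either a factor object (copied into the graph) or a shared_ptr. A minimal self-contained sketch of the new call pattern, with illustrative measurement and noise values (it uses Vector_ to match this era of the API):

    #include <gtsam/nonlinear/NonlinearFactorGraph.h>
    #include <gtsam/slam/PriorFactor.h>
    #include <gtsam/slam/BetweenFactor.h>
    #include <gtsam/geometry/Pose2.h>

    using namespace gtsam;

    int main() {
      NonlinearFactorGraph graph;
      noiseModel::Diagonal::shared_ptr noise =
          noiseModel::Diagonal::Sigmas(Vector_(3, 0.3, 0.3, 0.1));
      // push_back copies the factor into the graph; a shared_ptr overload
      // exists as well and stores the pointer without copying
      graph.push_back(PriorFactor<Pose2>(0, Pose2(0.0, 0.0, 0.0), noise));
      graph.push_back(BetweenFactor<Pose2>(0, 1, Pose2(1.0, 0.0, 0.0), noise));
      return 0;
    }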

View File

@@ -80,7 +80,7 @@ int main(int argc, char** argv) {
 Pose2 priorMean(0.0, 0.0, 0.0); // prior at origin
 noiseModel::Diagonal::shared_ptr priorNoise = noiseModel::Diagonal::Sigmas(Vector_(3, 0.3, 0.3, 0.1));
 Key priorKey = 0;
-newFactors.add(PriorFactor<Pose2>(priorKey, priorMean, priorNoise));
+newFactors.push_back(PriorFactor<Pose2>(priorKey, priorMean, priorNoise));
 newValues.insert(priorKey, priorMean); // Initialize the first pose at the mean of the prior
 newTimestamps[priorKey] = 0.0; // Set the timestamp associated with this key to 0.0 seconds;
@@ -105,11 +105,11 @@ int main(int argc, char** argv) {
 // Add odometry factors from two different sources with different error stats
 Pose2 odometryMeasurement1 = Pose2(0.61, -0.08, 0.02);
 noiseModel::Diagonal::shared_ptr odometryNoise1 = noiseModel::Diagonal::Sigmas(Vector_(3, 0.1, 0.1, 0.05));
-newFactors.add(BetweenFactor<Pose2>(previousKey, currentKey, odometryMeasurement1, odometryNoise1));
+newFactors.push_back(BetweenFactor<Pose2>(previousKey, currentKey, odometryMeasurement1, odometryNoise1));
 Pose2 odometryMeasurement2 = Pose2(0.47, 0.03, 0.01);
 noiseModel::Diagonal::shared_ptr odometryNoise2 = noiseModel::Diagonal::Sigmas(Vector_(3, 0.05, 0.05, 0.05));
-newFactors.add(BetweenFactor<Pose2>(previousKey, currentKey, odometryMeasurement2, odometryNoise2));
+newFactors.push_back(BetweenFactor<Pose2>(previousKey, currentKey, odometryMeasurement2, odometryNoise2));
 // Update the smoothers with the new factors
 smootherBatch.update(newFactors, newValues, newTimestamps);
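
The update call that closes this hunk is the heart of the fixed-lag API: each step hands the smoother the new factors, initial guesses for any new variables, and a key-to-timestamp map it uses to decide which variables have aged out of the lag window and can be marginalized. A hedged sketch of one such step, with an illustrative lag and measurement (it mirrors, but does not reproduce, the example's loop):

    #include <gtsam_unstable/nonlinear/BatchFixedLagSmoother.h>
    #include <gtsam/slam/BetweenFactor.h>
    #include <gtsam/geometry/Pose2.h>

    using namespace gtsam;

    int main() {
      BatchFixedLagSmoother smootherBatch(2.0);  // keep ~2.0s of states
      NonlinearFactorGraph newFactors;
      Values newValues;
      FixedLagSmoother::KeyTimestampMap newTimestamps;

      double time = 0.5;  // current measurement time, illustrative
      Key previousKey = 0, currentKey = 1;
      newFactors.push_back(BetweenFactor<Pose2>(previousKey, currentKey,
          Pose2(0.5, 0.0, 0.0),
          noiseModel::Diagonal::Sigmas(Vector_(3, 0.1, 0.1, 0.05))));
      newValues.insert(currentKey, Pose2(0.5, 0.0, 0.0));  // initial guess
      newTimestamps[currentKey] = time;

      // Variables older than the lag are marginalized out inside update
      smootherBatch.update(newFactors, newValues, newTimestamps);
      Pose2 estimate = smootherBatch.calculateEstimate<Pose2>(currentKey);
      return 0;
    }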

View File

@@ -129,7 +129,7 @@ int main(int argc, char** argv) {
 Pose2 pose0 = Pose2(-34.2086489999201, 45.3007639991120,
 M_PI - 2.02108900000000);
 NonlinearFactorGraph newFactors;
-newFactors.add(PriorFactor<Pose2>(0, pose0, priorNoise));
+newFactors.push_back(PriorFactor<Pose2>(0, pose0, priorNoise));
 ofstream os2(
 "/Users/dellaert/borg/gtsam/gtsam_unstable/examples/rangeResultLM.txt");
@@ -154,7 +154,7 @@ int main(int argc, char** argv) {
 if (smart) {
 BOOST_FOREACH(size_t jj,ids) {
 smartFactors[jj] = SmartPtr(new SmartRangeFactor(sigmaR));
-newFactors.add(smartFactors[jj]);
+newFactors.push_back(smartFactors[jj]);
 }
 }
@@ -173,7 +173,7 @@ int main(int argc, char** argv) {
 boost::tie(t, odometry) = timedOdometry;
 // add odometry factor
-newFactors.add(
+newFactors.push_back(
 BetweenFactor<Pose2>(i - 1, i, odometry,
 NM::Diagonal::Sigmas(odoSigmas)));
@@ -198,7 +198,7 @@ int main(int argc, char** argv) {
 // Throw out obvious outliers based on current landmark estimates
 Vector error = factor.unwhitenedError(landmarkEstimates);
 if (k <= 200 || fabs(error[0]) < 5)
-newFactors.add(factor);
+newFactors.push_back(factor);
 }
 totalCount += 1;
 }
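
One detail worth flagging in the hunks above: the smart factors are pushed by shared_ptr (newFactors.push_back(smartFactors[jj])), so the graph aliases the very factor objects the example keeps adding range measurements to, whereas the gated range factor is passed by value and copied. A small self-contained sketch of both forms and the outlier gate, with illustrative keys, measurements, and sigmas:

    #include <gtsam/nonlinear/NonlinearFactorGraph.h>
    #include <gtsam/nonlinear/Values.h>
    #include <gtsam/slam/RangeFactor.h>
    #include <gtsam/geometry/Pose2.h>
    #include <gtsam/geometry/Point2.h>
    #include <gtsam_unstable/slam/SmartRangeFactor.h>
    #include <cmath>

    using namespace gtsam;

    int main() {
      NonlinearFactorGraph newFactors;

      // Shared-pointer form: graph and caller share one factor object, so
      // measurements added to *smartFactor later are seen through the graph
      double sigmaR = 100.0;  // illustrative range sigma
      boost::shared_ptr<SmartRangeFactor> smartFactor(new SmartRangeFactor(sigmaR));
      newFactors.push_back(smartFactor);

      // By-value form, guarded by a gate on the unwhitened (raw) error
      Values landmarkEstimates;
      landmarkEstimates.insert(1, Pose2(0.0, 0.0, 0.0));
      landmarkEstimates.insert(100, Point2(10.0, 0.0));
      RangeFactor<Pose2, Point2> factor(1, 100, 9.5,
          noiseModel::Isotropic::Sigma(1, 1.0));
      Vector error = factor.unwhitenedError(landmarkEstimates);
      if (fabs(error[0]) < 5)  // keep only plausible ranges
        newFactors.push_back(factor);
      return 0;
    }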

View File

@@ -124,7 +124,7 @@ int main(int argc, char** argv) {
 // Add prior on first pose
 Pose2 pose0 = Pose2(-34.2086489999201, 45.3007639991120, -2.02108900000000);
 NonlinearFactorGraph newFactors;
-newFactors.add(PriorFactor<Pose2>(0, pose0, priorNoise));
+newFactors.push_back(PriorFactor<Pose2>(0, pose0, priorNoise));
 // initialize points (Gaussian)
 Values initial;
@@ -150,7 +150,7 @@ int main(int argc, char** argv) {
 boost::tie(t, odometry) = timedOdometry;
 // add odometry factor
-newFactors.add(
+newFactors.push_back(
 BetweenFactor<Pose2>(i - 1, i, odometry,
 NM::Diagonal::Sigmas(odoSigmas)));
@@ -168,7 +168,7 @@ int main(int argc, char** argv) {
 // Throw out obvious outliers based on current landmark estimates
 Vector error = factor.unwhitenedError(landmarkEstimates);
 if (k <= 200 || fabs(error[0]) < 5)
-newFactors.add(factor);
+newFactors.push_back(factor);
 k = k + 1;
 countK = countK + 1;
 }

View File

@@ -20,10 +20,8 @@ ${gtsam_unstable-default})
 # Exclude tests that don't work
 set (nonlinear_excluded_tests #"")
-"${CMAKE_CURRENT_SOURCE_DIR}/tests/testConcurrentIncrementalFilter.cpp"
-"${CMAKE_CURRENT_SOURCE_DIR}/tests/testConcurrentIncrementalSmootherDL.cpp"
-"${CMAKE_CURRENT_SOURCE_DIR}/tests/testConcurrentIncrementalSmootherGN.cpp"
-"${CMAKE_CURRENT_SOURCE_DIR}/tests/testIncrementalFixedLagSmoother.cpp"
+"${CMAKE_CURRENT_SOURCE_DIR}/tests/testConcurrentIncrementalFilter.cpp"
+#"${CMAKE_CURRENT_SOURCE_DIR}/tests/testIncrementalFixedLagSmoother.cpp"
 )

View File

@@ -61,10 +61,10 @@ public:
 /** default constructor */
-FixedLagSmoother(double smootherLag = 0.0) : smootherLag_(smootherLag) { };
+FixedLagSmoother(double smootherLag = 0.0) : smootherLag_(smootherLag) { }
 /** destructor */
-virtual ~FixedLagSmoother() { };
+virtual ~FixedLagSmoother() { }
 /** Print the factor for debugging and testing (implementing Testable) */
 virtual void print(const std::string& s = "FixedLagSmoother:\n", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;

View File

@@ -39,11 +39,11 @@ public:
 typedef boost::shared_ptr<IncrementalFixedLagSmoother> shared_ptr;
 /** default constructor */
-IncrementalFixedLagSmoother(double smootherLag = 0.0, const ISAM2Params& parameters = ISAM2Params()) :
-FixedLagSmoother(smootherLag), isam_(parameters) { };
+IncrementalFixedLagSmoother(double smootherLag = 0.0, const ISAM2Params& parameters = ISAM2Params())
+: FixedLagSmoother(smootherLag), isam_(parameters) {}
 /** destructor */
-virtual ~IncrementalFixedLagSmoother() { };
+virtual ~IncrementalFixedLagSmoother() {}
 /** Print the factor for debugging and testing (implementing Testable) */
 virtual void print(const std::string& s = "IncrementalFixedLagSmoother:\n", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
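
Brace style aside, this is the constructor the fixed-lag examples rely on: a lag (in seconds) plus an ISAM2Params block that is forwarded to the underlying iSAM2 instance. A brief usage sketch with illustrative parameter values, not prescribed defaults:

    #include <gtsam_unstable/nonlinear/IncrementalFixedLagSmoother.h>

    using namespace gtsam;

    int main() {
      ISAM2Params parameters;
      parameters.relinearizeThreshold = 0.01;  // illustrative settings
      parameters.relinearizeSkip = 1;

      // Keep roughly the last 2.0 seconds of states in the smoother
      IncrementalFixedLagSmoother smootherISAM2(2.0, parameters);
      return 0;
    }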

View File

@@ -23,10 +23,10 @@
 #include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
 #include <gtsam/nonlinear/NonlinearFactorGraph.h>
 #include <gtsam/nonlinear/LinearContainerFactor.h>
-#include <gtsam/nonlinear/Ordering.h>
 #include <gtsam/nonlinear/Values.h>
 #include <gtsam/nonlinear/Symbol.h>
-#include <gtsam/nonlinear/Key.h>
+#include <gtsam/inference/Key.h>
+#include <gtsam/inference/Ordering.h>
 #include <gtsam/inference/JunctionTree.h>
 #include <gtsam/geometry/Pose3.h>
 #include <gtsam/base/TestableAssertions.h>
@@ -463,7 +463,7 @@ TEST( ConcurrentIncrementalSmootherDL, synchronize_2 )
 filterSeparatorValues.insert(1, Pose3().compose(poseError));
 filterSeparatorValues.insert(2, filterSeparatorValues.at<Pose3>(1).compose(poseOdometry).compose(poseError));
 filterSumarization.push_back(LinearContainerFactor(PriorFactor<Pose3>(1, poseInitial, noisePrior).linearize(filterSeparatorValues), filterSeparatorValues));
-filterSumarization.push_back(LinearContainerFactor(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery).linearize(filterSeparatorValues,), filterSeparatorValues));
+filterSumarization.push_back(LinearContainerFactor(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery).linearize(filterSeparatorValues), filterSeparatorValues));
 smootherFactors.push_back(BetweenFactor<Pose3>(2, 3, poseOdometry, noiseOdometery));
 smootherFactors.push_back(BetweenFactor<Pose3>(3, 4, poseOdometry, noiseOdometery));
 smootherValues.insert(3, filterSeparatorValues.at<Pose3>(2).compose(poseOdometry).compose(poseError));
@@ -573,23 +573,21 @@ TEST( ConcurrentIncrementalSmootherDL, synchronize_3 )
 // Check the optimized value of smoother state
 NonlinearFactorGraph allFactors = smootherFactors;
 Values allValues = smoother.getLinearizationPoint();
-ordering = smoother.getOrdering(); // I'm really hoping this is an acceptable ordering...
-GaussianFactorGraph::shared_ptr LinFactorGraph = allFactors.linearize(allValues, ordering);
+GaussianFactorGraph::shared_ptr LinFactorGraph = allFactors.linearize(allValues);
 // GaussianSequentialSolver GSS = GaussianSequentialSolver(*LinFactorGraph);
 // GaussianBayesNet::shared_ptr GBNsptr = GSS.eliminate();
 FastSet<Index> allkeys = LinFactorGraph->keys();
 BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, filterSeparatorValues) {
-Index index = ordering.at(key_value.key);
-allkeys.erase(index);
+allkeys.erase(key_value.key);
 }
 std::vector<Index> variables(allkeys.begin(), allkeys.end());
-std::pair<GaussianConditional::shared_ptr, GaussianFactorGraph> result = LinFactorGraph->eliminate(variables, EliminateCholesky);
+std::pair<GaussianBayesNet::shared_ptr, GaussianFactorGraph::shared_ptr> result = LinFactorGraph->eliminatePartialSequential(variables, EliminateCholesky);
 expectedSmootherSummarization.resize(0);
-BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, result.second) {
-expectedSmootherSummarization.push_back(LinearContainerFactor(factor, ordering, allValues));
+BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, *result.second) {
+expectedSmootherSummarization.push_back(LinearContainerFactor(factor, allValues));
 }
 CHECK(assert_equal(expectedSmootherSummarization, actualSmootherSummarization, 1e-6));
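
The substantive change in this test (and its GN twin below) is the move from the old Ordering-mediated elimination to the ordering-free API: linearize no longer takes an Ordering, keys index linear factors directly, and eliminatePartialSequential eliminates just the requested variables, returning the resulting Bayes net together with the remaining marginal factor graph, whose factors can be wrapped in LinearContainerFactor without an Ordering. A hedged standalone sketch of that flow on a toy two-pose graph (graph contents illustrative):

    #include <gtsam/nonlinear/NonlinearFactorGraph.h>
    #include <gtsam/nonlinear/LinearContainerFactor.h>
    #include <gtsam/nonlinear/Values.h>
    #include <gtsam/slam/PriorFactor.h>
    #include <gtsam/slam/BetweenFactor.h>
    #include <gtsam/geometry/Pose3.h>
    #include <gtsam/linear/GaussianFactorGraph.h>
    #include <gtsam/linear/GaussianBayesNet.h>
    #include <boost/foreach.hpp>

    using namespace gtsam;

    int main() {
      // Toy graph: prior on key 1, odometry between keys 1 and 2
      NonlinearFactorGraph graph;
      SharedNoiseModel noise = noiseModel::Isotropic::Sigma(6, 0.1);
      graph.push_back(PriorFactor<Pose3>(1, Pose3(), noise));
      graph.push_back(BetweenFactor<Pose3>(1, 2, Pose3(), noise));

      Values values;
      values.insert(1, Pose3());
      values.insert(2, Pose3());

      // Linearize without an Ordering; keys carry over to the linear graph
      GaussianFactorGraph::shared_ptr linear = graph.linearize(values);

      // Eliminate only key 1: result.first is the Bayes net over key 1,
      // result.second is the remaining factor graph on key 2 (the marginal)
      std::vector<Key> variables;
      variables.push_back(1);
      std::pair<GaussianBayesNet::shared_ptr, GaussianFactorGraph::shared_ptr>
          result = linear->eliminatePartialSequential(variables, EliminateCholesky);

      // Wrap each marginal factor for reuse in a nonlinear graph
      NonlinearFactorGraph summarization;
      BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, *result.second)
        summarization.push_back(LinearContainerFactor(factor, values));
      return 0;
    }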

View File

@@ -399,8 +399,8 @@ TEST( ConcurrentIncrementalSmootherGN, synchronize_1 )
 Ordering ordering;
 ordering.push_back(1);
 ordering.push_back(2);
-filterSumarization.push_back(LinearContainerFactor(PriorFactor<Pose3>(1, poseInitial, noisePrior).linearize(filterSeparatorValues, ordering), ordering, filterSeparatorValues));
-filterSumarization.push_back(LinearContainerFactor(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery).linearize(filterSeparatorValues, ordering), ordering, filterSeparatorValues));
+filterSumarization.push_back(LinearContainerFactor(PriorFactor<Pose3>(1, poseInitial, noisePrior).linearize(filterSeparatorValues), filterSeparatorValues));
+filterSumarization.push_back(LinearContainerFactor(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery).linearize(filterSeparatorValues), filterSeparatorValues));
 // Create expected values: the smoother output will be empty for this case
 NonlinearFactorGraph expectedSmootherSummarization;
@@ -582,16 +582,14 @@ TEST( ConcurrentIncrementalSmootherGN, synchronize_3 )
 // GaussianBayesNet::shared_ptr GBNsptr = GSS.eliminate();
 FastSet<Index> allkeys = LinFactorGraph->keys();
-BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, filterSeparatorValues) {
-Index index = ordering.at(key_value.key);
-allkeys.erase(index);
-}
+BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, filterSeparatorValues)
+allkeys.erase(key_value.key);
 std::vector<Index> variables(allkeys.begin(), allkeys.end());
-std::pair<GaussianConditional::shared_ptr, GaussianFactorGraph> result = LinFactorGraph->eliminate(variables, EliminateCholesky);
+std::pair<GaussianBayesNet::shared_ptr, GaussianFactorGraph::shared_ptr> result = LinFactorGraph->eliminatePartialSequential(variables, EliminateCholesky);
 expectedSmootherSummarization.resize(0);
-BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, result.second) {
-expectedSmootherSummarization.push_back(LinearContainerFactor(factor, ordering, allValues));
+BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, *result.second) {
+expectedSmootherSummarization.push_back(LinearContainerFactor(factor, allValues));
 }
 CHECK(assert_equal(expectedSmootherSummarization, actualSmootherSummarization, 1e-6));