change to input params, add docstring

release/4.3a0
Akshay Krishnan 2022-05-05 18:40:58 -07:00
parent 3ea19f4bd0
commit 1d6fd5409a
3 changed files with 129 additions and 56 deletions

gtsam/sfm/TranslationRecovery.cpp

@@ -28,6 +28,7 @@
#include <gtsam/nonlinear/Values.h>
#include <gtsam/sfm/TranslationFactor.h>
#include <gtsam/sfm/TranslationRecovery.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/slam/PriorFactor.h>
#include <gtsam/slam/expressions.h>
@@ -42,8 +43,8 @@ static std::mt19937 kRandomNumberGenerator(42);
TranslationRecovery::TranslationRecovery(
const TranslationRecovery::TranslationEdges &relativeTranslations,
const LevenbergMarquardtParams &lmParams)
: params_(lmParams) {
const TranslationRecoveryParams &params)
: params_(params) {
// Some relative translations may be zero. We treat nodes that have a zero
// relativeTranslation as a single node.
@@ -73,12 +74,21 @@ TranslationRecovery::TranslationRecovery(
NonlinearFactorGraph TranslationRecovery::buildGraph() const {
NonlinearFactorGraph graph;
// Add all relative translation edges
// Add translation factors for input translation directions.
for (auto edge : relativeTranslations_) {
graph.emplace_shared<TranslationFactor>(edge.key1(), edge.key2(),
edge.measured(), edge.noiseModel());
}
// Add between factors for optional relative translations.
for (auto edge : params_.getBetweenTranslations()) {
Key k1 = getUniqueKey(edge.key1()), k2 = getUniqueKey(edge.key2());
if (k1 != k2) {
graph.emplace_shared<BetweenFactor<Point3>>(k1, k2, edge.measured(),
edge.noiseModel());
}
}
return graph;
}
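For context on the two factor types emplaced above: a TranslationFactor constrains only the direction of the difference between two unknown translations, so the overall scale of the solution is unobservable from direction measurements alone; the BetweenFactor<Point3> entries built from params_.getBetweenTranslations() (or the scale prior added in addPrior below) supply that missing scale. A minimal sketch of the predicted direction measurement; the helper name predictedDirection is illustrative and not part of the GTSAM API:

#include <gtsam/geometry/Point3.h>
#include <gtsam/geometry/Unit3.h>

// Only the direction of (wTb - wTa) is constrained by a direction measurement,
// so scaling every translation by a common factor leaves all predicted
// directions unchanged.
gtsam::Unit3 predictedDirection(const gtsam::Point3 &wTa, const gtsam::Point3 &wTb) {
  return gtsam::Unit3(wTb - wTa);  // assumes wTa and wTb are distinct
}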
@@ -87,17 +97,13 @@ void TranslationRecovery::addPrior(
const SharedNoiseModel &priorNoiseModel) const {
auto edge = relativeTranslations_.begin();
if (edge == relativeTranslations_.end()) return;
graph->emplace_shared<PriorFactor<Point3> >(edge->key1(), Point3(0, 0, 0),
priorNoiseModel);
graph->emplace_shared<PriorFactor<Point3> >(
edge->key2(), scale * edge->measured().point3(), edge->noiseModel());
}
void TranslationRecovery::addPrior(
Key i, const Point3 &prior,
const boost::shared_ptr<NonlinearFactorGraph> graph,
const SharedNoiseModel &priorNoiseModel) const {
graph->addPrior(getUniqueKey(i), prior, priorNoiseModel);
graph->emplace_shared<PriorFactor<Point3>>(edge->key1(), Point3(0, 0, 0),
priorNoiseModel);
// Add a scale prior only if no other between factors were added.
if (params_.getBetweenTranslations().empty()) {
graph->emplace_shared<PriorFactor<Point3>>(
edge->key2(), scale * edge->measured().point3(), edge->noiseModel());
}
}
Key TranslationRecovery::getUniqueKey(const Key i) const {
@@ -110,25 +116,21 @@ Key TranslationRecovery::getUniqueKey(const Key i) const {
return i;
}
void TranslationRecovery::addRelativeHardConstraint(
Key i, Key j, const Point3 &w_itj,
const boost::shared_ptr<NonlinearFactorGraph> graph) const {
Point3_ wti_(getUniqueKey(i)), wtj_(getUniqueKey(j));
Expression<Point3> w_itj_ = wtj_ - wti_;
graph->addExpressionFactor(noiseModel::Constrained::All(3, 1e9), w_itj,
w_itj_);
}
Values TranslationRecovery::initializeRandomly(std::mt19937 *rng) const {
uniform_real_distribution<double> randomVal(-1, 1);
// Create a lambda expression that checks whether the value exists and, if not,
// initializes it from the input initial values or randomly.
Values initial;
const Values inputInitial = params_.getInitialValues();
auto insert = [&](Key j) {
if (!initial.exists(j)) {
if (initial.exists(j)) return;
if (inputInitial.exists(j)) {
initial.insert<Point3>(j, inputInitial.at<Point3>(j));
} else {
initial.insert<Point3>(
j, Point3(randomVal(*rng), randomVal(*rng), randomVal(*rng)));
}
// Assumes all nodes connected by zero-edges have the same initialization.
};
// Loop over measurements and add a random translation
@@ -157,12 +159,13 @@ Values TranslationRecovery::run(const double scale) const {
boost::make_shared<NonlinearFactorGraph>(buildGraph());
addPrior(scale, graph_ptr);
const Values initial = initializeRandomly();
LevenbergMarquardtOptimizer lm(*graph_ptr, initial, params_);
LevenbergMarquardtOptimizer lm(*graph_ptr, initial, params_.getLMParams());
Values result = lm.optimize();
return addDuplicateNodes(result);
return addSameTranslationNodes(result);
}
Values TranslationRecovery::addDuplicateNodes(const Values &result) const {
Values TranslationRecovery::addSameTranslationNodes(
const Values &result) const {
Values final_result = result;
// Nodes that were not optimized are stored in sameTranslationNodes_ as a map
// from a key that was optimized to keys that were not optimized. Iterate over

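Taken together, the changes in this file route the optional known-scale translations, LM parameters, and initial values through TranslationRecoveryParams instead of separate addPrior/addRelativeHardConstraint calls. A minimal end-to-end sketch of the new calling pattern, mirroring the ThreePosesWithOneSoftConstraint test added below; the poses, keys, and noise sigma are illustrative:

#include <gtsam/geometry/Pose3.h>
#include <gtsam/sfm/TranslationRecovery.h>
#include <vector>

using namespace gtsam;

int main() {
  // Simulate translation direction measurements from ground-truth poses.
  Values poses;
  poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
  poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
  poses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));
  auto relativeTranslations =
      TranslationRecovery::SimulateMeasurements(poses, {{0, 1}, {0, 3}, {1, 3}});

  // One relative translation with known scale, supplied through the params.
  std::vector<BinaryMeasurement<Point3>> betweenTranslations;
  betweenTranslations.emplace_back(0, 3, Point3(1, -1, 0),
                                   noiseModel::Isotropic::Sigma(3, 1e-2));
  TranslationRecoveryParams params;
  params.setBetweenTranslations(betweenTranslations);

  // With between factors present, no scale prior is added, so run() does not
  // need a scale argument.
  TranslationRecovery algorithm(relativeTranslations, params);
  Values result = algorithm.run();
  result.print("estimated translations");
  return 0;
}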
gtsam/sfm/TranslationRecovery.h

@@ -28,6 +28,40 @@
namespace gtsam {
// Parameters for the Translation Recovery problem.
class TranslationRecoveryParams {
public:
std::vector<BinaryMeasurement<Point3>> getBetweenTranslations() const {
return betweenTranslations_;
}
void setBetweenTranslations(
const std::vector<BinaryMeasurement<Point3>> &betweenTranslations) {
betweenTranslations_ = betweenTranslations;
}
LevenbergMarquardtParams getLMParams() const { return lmParams_; }
Values getInitialValues() const { return initial_; }
void setInitialValues(const Values &values) { initial_ = values; }
void setLMParams(const LevenbergMarquardtParams &lmParams) {
lmParams_ = lmParams;
}
private:
// Relative translations with a known scale, used as between factors in the
// problem if provided.
std::vector<BinaryMeasurement<Point3>> betweenTranslations_;
// LevenbergMarquardtParams for optimization.
LevenbergMarquardtParams lmParams_;
// Initial values; random initialization will be used if not provided.
Values initial_;
};
// Set up an optimization problem for the unknown translations Ti in the world
// coordinate frame, given the known camera attitudes wRi with respect to the
// world frame, and a set of (noisy) translation directions of type Unit3,
@@ -57,8 +91,8 @@ class TranslationRecovery {
// Translation directions between camera pairs.
TranslationEdges relativeTranslations_;
// Parameters used by the LM Optimizer.
LevenbergMarquardtParams params_;
// Parameters.
TranslationRecoveryParams params_;
// Map from a key in the graph to a set of keys that share the same
// translation.
@@ -71,13 +105,11 @@ class TranslationRecovery {
* @param relativeTranslations the relative translations, in world coordinate
* frames, vector of BinaryMeasurements of Unit3, where each key of a
* measurement is a point in 3D.
* @param lmParams (optional) gtsam::LevenbergMarquardtParams that can be
* used to modify the parameters for the LM optimizer. By default, uses the
* default LM parameters.
* @param params (optional) parameters for the recovery problem.
*/
TranslationRecovery(
const TranslationEdges &relativeTranslations,
const LevenbergMarquardtParams &lmParams = LevenbergMarquardtParams());
const TranslationRecoveryParams &params = TranslationRecoveryParams());
/**
* @brief Build the factor graph to do the optimization.
@@ -98,15 +130,6 @@
const SharedNoiseModel &priorNoiseModel =
noiseModel::Isotropic::Sigma(3, 0.01)) const;
void addPrior(Key i, const Point3 &prior,
const boost::shared_ptr<NonlinearFactorGraph> graph,
const SharedNoiseModel &priorNoiseModel =
noiseModel::Isotropic::Sigma(3, 0.01)) const;
void addRelativeHardConstraint(
Key i, Key j, const Point3 &w_itj,
const boost::shared_ptr<NonlinearFactorGraph> graph) const;
/**
* @brief Create random initial translations.
*
@@ -126,12 +149,11 @@
* @brief Build and optimize factor graph.
*
* @param scale scale for first relative translation which fixes gauge.
* The scale is only used if betweenTranslations in the params is empty.
* @return Values
*/
Values run(const double scale = 1.0) const;
Values addDuplicateNodes(const Values &result) const;
/**
* @brief Simulate translation direction measurements
*
@@ -145,6 +167,23 @@
const Values &poses, const std::vector<KeyPair> &edges);
private:
/**
* @brief Gets the key of the variable being optimized among multiple input
* variables that have the same translation.
*
* @param i key of input variable.
* @return Key of optimized variable - same as input if it does not have any
* zero-translation edges.
*/
Key getUniqueKey(const Key i) const;
/**
* @brief Adds nodes that were not optimized for because they were connected
* to another node with a zero-translation edge in the input.
*
* @param result optimization problem result
* @return translation estimates for all variables in the input.
*/
Values addSameTranslationNodes(const Values &result) const;
};
} // namespace gtsam
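The private helpers documented above implement the zero-edge handling noted in the constructor: keys joined by a zero relative translation are collapsed into a single optimized variable, and addSameTranslationNodes copies the estimate back to every key in the group. A small sketch of that behavior, patterned after the existing ThreePosesWithZeroTranslation test; the poses, keys, and function name are illustrative:

#include <gtsam/geometry/Pose3.h>
#include <gtsam/sfm/TranslationRecovery.h>

using namespace gtsam;

// Cameras 1 and 2 share the same position, so the simulated 1-2 direction
// measurement is zero and both keys map to one optimized variable.
void sameTranslationExample() {
  Values poses;
  poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
  poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
  poses.insert<Pose3>(2, Pose3(Rot3(), Point3(2, 0, 0)));
  auto relativeTranslations =
      TranslationRecovery::SimulateMeasurements(poses, {{0, 1}, {0, 2}, {1, 2}});

  TranslationRecovery algorithm(relativeTranslations);
  Values result = algorithm.run(/*scale=*/2.0);
  // result contains both key 1 and key 2, with identical translation estimates.
}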

gtsam/sfm/tests/testTranslationRecovery.cpp

@@ -265,6 +265,41 @@ TEST(TranslationRecovery, ThreePosesWithZeroTranslation) {
EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(2), 1e-8));
}
TEST(TranslationRecovery, ThreePosesWithOneSoftConstraint) {
// Create a dataset with 3 poses.
// __ __
// \/ \/
// 0 _____ 1
// \ __ /
// \\//
// 3
//
// 0 and 1 face in the same direction but have a translation offset. 3 faces
// the same direction as 0 and 1, lies between them, and has a Y-axis offset.
Values poses;
poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
poses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));
auto relativeTranslations =
TranslationRecovery::SimulateMeasurements(poses, {{0, 1}, {0, 3}, {1, 3}});
TranslationRecoveryParams params;
std::vector<BinaryMeasurement<Point3>> betweenTranslations;
betweenTranslations.emplace_back(0, 3, Point3(1, -1, 0), noiseModel::Isotropic::Sigma(3, 1e-2));
params.setBetweenTranslations(betweenTranslations);
TranslationRecovery algorithm(relativeTranslations, params);
auto result = algorithm.run();
// Check result
EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-4));
EXPECT(assert_equal(Point3(2, 0, 0), result.at<Point3>(1), 1e-4));
EXPECT(assert_equal(Point3(1, -1, 0), result.at<Point3>(3), 1e-4));
}
TEST(TranslationRecovery, ThreePosesWithOneHardConstraint) {
// Create a dataset with 3 poses.
// __ __
@@ -283,25 +318,21 @@ TEST(TranslationRecovery, ThreePosesWithOneHardConstraint) {
poses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));
auto relativeTranslations =
TranslationRecovery::SimulateMeasurements(poses, {{0, 1}, {3, 0}});
TranslationRecovery::SimulateMeasurements(poses, {{0, 1}, {0, 3}, {1, 3}});
TranslationRecovery algorithm(relativeTranslations);
boost::shared_ptr<NonlinearFactorGraph> graph_ptr =
boost::make_shared<NonlinearFactorGraph>(algorithm.buildGraph());
algorithm.addPrior(0, Point3(), graph_ptr);
algorithm.addRelativeHardConstraint(0, 1, Point3(2, 0, 0), graph_ptr);
const Values initial = algorithm.initializeRandomly();
LevenbergMarquardtParams params;
LevenbergMarquardtOptimizer lm(*graph_ptr, initial, params);
auto result = algorithm.addDuplicateNodes(lm.optimize());
EXPECT_LONGS_EQUAL(4, graph_ptr->size());
TranslationRecoveryParams params;
std::vector<BinaryMeasurement<Point3>> betweenTranslations;
betweenTranslations.emplace_back(0, 1, Point3(2, 0, 0), noiseModel::Constrained::All(3, 1e2));
params.setBetweenTranslations(betweenTranslations);
TranslationRecovery algorithm(relativeTranslations, params);
auto result = algorithm.run();
// Check result
EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-4));
EXPECT(assert_equal(Point3(2, 0, 0), result.at<Point3>(1), 1e-4));
EXPECT(assert_equal(Point3(1, -1, 0), result.at<Point3>(3), 1e-4));
}
/* ************************************************************************* */
int main() {
TestResult tr;