move relativeTranslations to run()
parent c1a7cf21d5
commit 230bb8eb11
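The measurements now go to run() instead of the constructor, so a TranslationRecovery object only carries parameters. A minimal before/after sketch of a call site, mirroring the updated tests further down (the keys, the measured direction, and the noise sigma below are invented for illustration; this snippet is not part of the commit):

#include <gtsam/geometry/Unit3.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/sfm/TranslationRecovery.h>

using namespace gtsam;

int main() {
  // One translation-direction measurement between points 0 and 1.
  TranslationRecovery::TranslationEdges relativeTranslations;
  relativeTranslations.emplace_back(0, 1, Unit3(1, 0, 0),
                                    noiseModel::Isotropic::Sigma(3, 0.01));

  // Old API (removed by this commit): measurements were bound at construction.
  //   TranslationRecovery algorithm(relativeTranslations);
  //   Values result = algorithm.run(/*betweenTranslations=*/{}, /*scale=*/2.0);

  // New API: a default-constructed object, measurements passed to run().
  TranslationRecovery algorithm;
  Values result = algorithm.run(relativeTranslations, /*scale=*/2.0);
  result.print("recovered translations");
  return 0;
}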
@@ -41,16 +41,13 @@ using namespace std;
 // In Wrappers we have no access to this so have a default ready.
 static std::mt19937 kRandomNumberGenerator(42);
 
-TranslationRecovery::TranslationRecovery(
-    const TranslationRecovery::TranslationEdges &relativeTranslations,
-    const TranslationRecoveryParams &params)
-    : params_(params) {
-  // Some relative translations may be zero. We treat nodes that have a zero
-  // relativeTranslation as a single node.
-
-  // A DSFMap is used to find sets of nodes that have a zero relative
-  // translation. Add the nodes in each edge to the DSFMap, and merge nodes that
-  // are connected by a zero relative translation.
+// Some relative translations may be zero. We treat nodes that have a zero
+// relativeTranslation as a single node.
+// A DSFMap is used to find sets of nodes that have a zero relative
+// translation. Add the nodes in each edge to the DSFMap, and merge nodes that
+// are connected by a zero relative translation.
+DSFMap<Key> getSameTranslationDSFMap(
+    const std::vector<BinaryMeasurement<Unit3>> &relativeTranslations) {
   DSFMap<Key> sameTranslationDSF;
   for (const auto &edge : relativeTranslations) {
     Key key1 = sameTranslationDSF.find(edge.key1());
@@ -59,23 +56,52 @@ TranslationRecovery::TranslationRecovery(
       sameTranslationDSF.merge(key1, key2);
     }
   }
-  // Use only those edges for which two keys have a distinct root in the DSFMap.
-  for (const auto &edge : relativeTranslations) {
-    Key key1 = sameTranslationDSF.find(edge.key1());
-    Key key2 = sameTranslationDSF.find(edge.key2());
-    if (key1 == key2) continue;
-    relativeTranslations_.emplace_back(key1, key2, edge.measured(),
-                                       edge.noiseModel());
-  }
-  // Store the DSF map for post-processing results.
-  sameTranslationNodes_ = sameTranslationDSF.sets();
+  return sameTranslationDSF;
 }
 
-NonlinearFactorGraph TranslationRecovery::buildGraph() const {
+// Removes zero-translation edges from measurements, and combines the nodes in
+// these edges into a single node.
+template <typename T>
+std::vector<BinaryMeasurement<T>> removeSameTranslationNodes(
+    const std::vector<BinaryMeasurement<T>> &edges,
+    const DSFMap<Key> &sameTranslationDSFMap) {
+  std::vector<BinaryMeasurement<T>> newEdges;
+  for (const auto &edge : edges) {
+    Key key1 = sameTranslationDSFMap.find(edge.key1());
+    Key key2 = sameTranslationDSFMap.find(edge.key2());
+    if (key1 == key2) continue;
+    newEdges.emplace_back(key1, key2, edge.measured(), edge.noiseModel());
+  }
+  return newEdges;
+}
+
+// Adds nodes that were not optimized for because they were connected
+// to another node with a zero-translation edge in the input.
+Values addSameTranslationNodes(const Values &result,
+                               const DSFMap<Key> &sameTranslationDSFMap) {
+  Values final_result = result;
+  // Nodes that were not optimized are stored in sameTranslationNodes_ as a map
+  // from a key that was optimized to keys that were not optimized. Iterate over
+  // map and add results for keys not optimized.
+  for (const auto &optimizedAndDuplicateKeys : sameTranslationDSFMap.sets()) {
+    Key optimizedKey = optimizedAndDuplicateKeys.first;
+    std::set<Key> duplicateKeys = optimizedAndDuplicateKeys.second;
+    // Add the result for the duplicate key if it does not already exist.
+    for (const Key duplicateKey : duplicateKeys) {
+      if (final_result.exists(duplicateKey)) continue;
+      final_result.insert<Point3>(duplicateKey,
+                                  final_result.at<Point3>(optimizedKey));
+    }
+  }
+  return final_result;
+}
+
+NonlinearFactorGraph TranslationRecovery::buildGraph(
+    const std::vector<BinaryMeasurement<Unit3>> &relativeTranslations) const {
   NonlinearFactorGraph graph;
 
   // Add translation factors for input translation directions.
-  for (auto edge : relativeTranslations_) {
+  for (auto edge : relativeTranslations) {
     graph.emplace_shared<TranslationFactor>(edge.key1(), edge.key2(),
                                             edge.measured(), edge.noiseModel());
   }
@@ -83,22 +109,20 @@ NonlinearFactorGraph TranslationRecovery::buildGraph() const {
 }
 
 void TranslationRecovery::addPrior(
+    const std::vector<BinaryMeasurement<Unit3>> &relativeTranslations,
+    const double scale,
     const std::vector<BinaryMeasurement<Point3>> &betweenTranslations,
-    const double scale, NonlinearFactorGraph *graph,
+    NonlinearFactorGraph *graph,
     const SharedNoiseModel &priorNoiseModel) const {
-  auto edge = relativeTranslations_.begin();
-  if (edge == relativeTranslations_.end()) return;
+  auto edge = relativeTranslations.begin();
+  if (edge == relativeTranslations.end()) return;
   graph->emplace_shared<PriorFactor<Point3>>(edge->key1(), Point3(0, 0, 0),
                                              priorNoiseModel);
 
   // Add between factors for optional relative translations.
   for (auto edge : betweenTranslations) {
-    Key k1 = getSameTranslationRootNode(edge.key1()),
-        k2 = getSameTranslationRootNode(edge.key2());
-    if (k1 != k2) {
-      graph->emplace_shared<BetweenFactor<Point3>>(k1, k2, edge.measured(),
-                                                   edge.noiseModel());
-    }
+    graph->emplace_shared<BetweenFactor<Point3>>(
+        edge.key1(), edge.key2(), edge.measured(), edge.noiseModel());
   }
 
   // Add a scale prior only if no other between factors were added.
@@ -108,17 +132,9 @@ void TranslationRecovery::addPrior(
   }
 }
 
-Key TranslationRecovery::getSameTranslationRootNode(const Key i) const {
-  for (const auto &optimizedAndDuplicateKeys : sameTranslationNodes_) {
-    Key optimizedKey = optimizedAndDuplicateKeys.first;
-    std::set<Key> duplicateKeys = optimizedAndDuplicateKeys.second;
-    if (i == optimizedKey || duplicateKeys.count(i)) return optimizedKey;
-  }
-  // Unlikely case, when i is not in the graph.
-  return i;
-}
-
-Values TranslationRecovery::initializeRandomly(std::mt19937 *rng) const {
+Values TranslationRecovery::initializeRandomly(
+    const std::vector<BinaryMeasurement<Unit3>> &relativeTranslations,
+    std::mt19937 *rng) const {
   uniform_real_distribution<double> randomVal(-1, 1);
   // Create a lambda expression that checks whether value exists and randomly
   // initializes if not.
@@ -135,54 +151,53 @@ Values TranslationRecovery::initializeRandomly(std::mt19937 *rng) const {
   };
 
   // Loop over measurements and add a random translation
-  for (auto edge : relativeTranslations_) {
+  for (auto edge : relativeTranslations) {
     insert(edge.key1());
     insert(edge.key2());
   }
-
-  // If there are no valid edges, but zero-distance edges exist, initialize one
-  // of the nodes in a connected component of zero-distance edges.
-  if (initial.empty() && !sameTranslationNodes_.empty()) {
-    for (const auto &optimizedAndDuplicateKeys : sameTranslationNodes_) {
-      Key optimizedKey = optimizedAndDuplicateKeys.first;
-      initial.insert<Point3>(optimizedKey, Point3(0, 0, 0));
-    }
-  }
   return initial;
 }
 
-Values TranslationRecovery::initializeRandomly() const {
-  return initializeRandomly(&kRandomNumberGenerator);
+Values TranslationRecovery::initializeRandomly(
+    const std::vector<BinaryMeasurement<Unit3>> &relativeTranslations) const {
+  return initializeRandomly(relativeTranslations, &kRandomNumberGenerator);
 }
 
 Values TranslationRecovery::run(
-    const std::vector<BinaryMeasurement<Point3>> &betweenTranslations,
-    const double scale) const {
-  NonlinearFactorGraph graph = buildGraph();
-  addPrior(betweenTranslations, scale, &graph);
-  const Values initial = initializeRandomly();
-  LevenbergMarquardtOptimizer lm(graph, initial, params_.lmParams);
-  Values result = lm.optimize();
-  return addSameTranslationNodes(result);
-}
-
-Values TranslationRecovery::addSameTranslationNodes(
-    const Values &result) const {
-  Values final_result = result;
-  // Nodes that were not optimized are stored in sameTranslationNodes_ as a map
-  // from a key that was optimized to keys that were not optimized. Iterate over
-  // map and add results for keys not optimized.
-  for (const auto &optimizedAndDuplicateKeys : sameTranslationNodes_) {
-    Key optimizedKey = optimizedAndDuplicateKeys.first;
-    std::set<Key> duplicateKeys = optimizedAndDuplicateKeys.second;
-    // Add the result for the duplicate key if it does not already exist.
-    for (const Key duplicateKey : duplicateKeys) {
-      if (final_result.exists(duplicateKey)) continue;
-      final_result.insert<Point3>(duplicateKey,
-                                  final_result.at<Point3>(optimizedKey));
+    const TranslationEdges &relativeTranslations, const double scale,
+    const std::vector<BinaryMeasurement<Point3>> &betweenTranslations) const {
+  // Find edges that have a zero-translation, and recompute relativeTranslations
+  // and betweenTranslations by retaining only one node for every zero-edge.
+  DSFMap<Key> sameTranslationDSFMap =
+      getSameTranslationDSFMap(relativeTranslations);
+  const TranslationEdges nonzeroRelativeTranslations =
+      removeSameTranslationNodes(relativeTranslations, sameTranslationDSFMap);
+  const std::vector<BinaryMeasurement<Point3>> nonzeroBetweenTranslations =
+      removeSameTranslationNodes(betweenTranslations, sameTranslationDSFMap);
+
+  // Create graph of translation factors.
+  NonlinearFactorGraph graph = buildGraph(nonzeroRelativeTranslations);
+
+  // Add global frame prior and scale (either from betweenTranslations or
+  // scale).
+  addPrior(nonzeroRelativeTranslations, scale, nonzeroBetweenTranslations,
+           &graph);
+
+  // Uses initial values from params if provided.
+  Values initial = initializeRandomly(nonzeroRelativeTranslations);
+
+  // If there are no valid edges, but zero-distance edges exist, initialize one
+  // of the nodes in a connected component of zero-distance edges.
+  if (initial.empty() && !sameTranslationDSFMap.sets().empty()) {
+    for (const auto &optimizedAndDuplicateKeys : sameTranslationDSFMap.sets()) {
+      Key optimizedKey = optimizedAndDuplicateKeys.first;
+      initial.insert<Point3>(optimizedKey, Point3(0, 0, 0));
     }
   }
-  return final_result;
+
+  LevenbergMarquardtOptimizer lm(graph, initial, params_.lmParams);
+  Values result = lm.optimize();
+  return addSameTranslationNodes(result, sameTranslationDSFMap);
 }
 
 TranslationRecovery::TranslationEdges TranslationRecovery::SimulateMeasurements(
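Before the header changes, a note on the DSFMap bookkeeping that run() now performs inline: merging the endpoints of every zero-length edge makes find() return one representative per cluster, and sets() maps that representative to all the keys it absorbed, which is what removeSameTranslationNodes and addSameTranslationNodes rely on above. A standalone toy sketch (keys 1 and 2 are made up; this snippet is not part of the commit):

#include <gtsam/base/DSFMap.h>
#include <gtsam/inference/Key.h>

#include <iostream>

using namespace gtsam;

int main() {
  // Suppose the measurement between keys 1 and 2 is (near-)zero, so the two
  // nodes should be optimized as a single variable.
  DSFMap<Key> dsf;
  dsf.merge(1, 2);

  // Both endpoints now report the same representative key, so the edge 1->2
  // collapses to a self-edge and is dropped by removeSameTranslationNodes.
  std::cout << "find(1) = " << dsf.find(1) << ", find(2) = " << dsf.find(2)
            << std::endl;

  // sets() lists each representative with the keys it covers; after
  // optimization, addSameTranslationNodes copies the representative's result
  // to every duplicate key.
  for (const auto &keyAndSet : dsf.sets()) {
    std::cout << "representative " << keyAndSet.first << " covers "
              << keyAndSet.second.size() << " keys" << std::endl;
  }
  return 0;
}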
@@ -69,29 +69,25 @@ class TranslationRecovery {
   // Parameters.
   TranslationRecoveryParams params_;
 
-  // Map from a key in the graph to a set of keys that share the same
-  // translation.
-  std::map<Key, std::set<Key>> sameTranslationNodes_;
-
  public:
   /**
    * @brief Construct a new Translation Recovery object
    *
-   * @param relativeTranslations the relative translations, in world coordinate
-   * frames, vector of BinaryMeasurements of Unit3, where each key of a
-   * measurement is a point in 3D.
-   * @param params (optional) parameters for the recovery problem.
+   * @param params parameters for the recovery problem.
    */
-  TranslationRecovery(
-      const TranslationEdges &relativeTranslations,
-      const TranslationRecoveryParams &params = TranslationRecoveryParams());
+  TranslationRecovery(const TranslationRecoveryParams &params)
+      : params_(params) {}
+
+  // Same as above, with default parameters.
+  TranslationRecovery() = default;
 
   /**
    * @brief Build the factor graph to do the optimization.
    *
    * @return NonlinearFactorGraph
    */
-  NonlinearFactorGraph buildGraph() const;
+  NonlinearFactorGraph buildGraph(
+      const std::vector<BinaryMeasurement<Unit3>> &relativeTranslations) const;
 
   /**
    * @brief Add priors on ednpoints of first measurement edge.
@@ -101,8 +97,10 @@ class TranslationRecovery {
    * @param priorNoiseModel the noise model to use with the prior.
    */
   void addPrior(
+      const std::vector<BinaryMeasurement<Unit3>> &relativeTranslations,
+      const double scale,
       const std::vector<BinaryMeasurement<Point3>> &betweenTranslations,
-      const double scale, NonlinearFactorGraph *graph,
+      NonlinearFactorGraph *graph,
       const SharedNoiseModel &priorNoiseModel =
           noiseModel::Isotropic::Sigma(3, 0.01)) const;
 
@@ -112,25 +110,34 @@ class TranslationRecovery {
    * @param rng random number generator
    * @return Values
    */
-  Values initializeRandomly(std::mt19937 *rng) const;
+  Values initializeRandomly(
+      const std::vector<BinaryMeasurement<Unit3>> &relativeTranslations,
+      std::mt19937 *rng) const;
 
   /**
    * @brief Version of initializeRandomly with a fixed seed.
    *
    * @return Values
    */
-  Values initializeRandomly() const;
+  Values initializeRandomly(
+      const std::vector<BinaryMeasurement<Unit3>> &relativeTranslations) const;
 
   /**
    * @brief Build and optimize factor graph.
    *
+   * @param relativeTranslations the relative translations, in world coordinate
+   * frames, vector of BinaryMeasurements of Unit3, where each key of a
+   * measurement is a point in 3D.
    * @param scale scale for first relative translation which fixes gauge.
-   * The scale is only used if relativeTranslations in the params is empty.
+   * The scale is only used if betweenTranslations is empty.
+   * @param betweenTranslations relative translations (with scale) between 2
+   * points in world coordinate frame known a priori.
    * @return Values
    */
-  Values run(
-      const std::vector<BinaryMeasurement<Point3>> &betweenTranslations = {},
-      const double scale = 1.0) const;
+  Values run(const TranslationEdges &relativeTranslations,
+             const double scale = 1.0,
+             const std::vector<BinaryMeasurement<Point3>> &betweenTranslations =
+                 {}) const;
 
   /**
    * @brief Simulate translation direction measurements
@@ -143,25 +150,5 @@ class TranslationRecovery {
    */
   static TranslationEdges SimulateMeasurements(
       const Values &poses, const std::vector<KeyPair> &edges);
-
- private:
-  /**
-   * @brief Gets the key of the variable being optimized among multiple input
-   * variables that have the same translation.
-   *
-   * @param i key of input variable.
-   * @return Key of optimized variable - same as input if it does not have any
-   * zero-translation edges.
-   */
-  Key getSameTranslationRootNode(const Key i) const;
-
-  /**
-   * @brief Adds nodes that were not optimized for because they were connected
-   * to another node with a zero-translation edge in the input.
-   *
-   * @param result optimization problem result
-   * @return translation estimates for all variables in the input.
-   */
-  Values addSameTranslationNodes(const Values &result) const;
 };
 } // namespace gtsam
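The test updates below switch every call over to the new run(relativeTranslations, scale, betweenTranslations) signature. Per the updated header comment, scale fixes the gauge only when betweenTranslations is empty; otherwise the known translations fix it, and the tests pass scale = 0.0 to make that explicit. A rough sketch of the two paths (keys, directions, and numbers are invented; not part of the commit):

#include <gtsam/geometry/Point3.h>
#include <gtsam/geometry/Unit3.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/sfm/TranslationRecovery.h>

#include <vector>

using namespace gtsam;

int main() {
  TranslationRecovery::TranslationEdges relativeTranslations;
  relativeTranslations.emplace_back(0, 1, Unit3(1, 0, 0),
                                    noiseModel::Isotropic::Sigma(3, 0.01));

  TranslationRecovery algorithm;

  // Gauge from scale: no betweenTranslations, so point 1 is placed 3 units
  // from point 0 along the measured direction.
  Values byScale = algorithm.run(relativeTranslations, /*scale=*/3.0);

  // Gauge from a known translation: scale is ignored, so pass 0.0.
  std::vector<BinaryMeasurement<Point3>> betweenTranslations;
  betweenTranslations.emplace_back(0, 1, Point3(3, 0, 0),
                                   noiseModel::Isotropic::Sigma(3, 1e-2));
  Values byBetween =
      algorithm.run(relativeTranslations, /*scale=*/0.0, betweenTranslations);
  return 0;
}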
@@ -62,13 +62,13 @@ TEST(TranslationRecovery, BAL) {
                                   unitTranslation.measured()));
   }
 
-  TranslationRecovery algorithm(relativeTranslations);
-  const auto graph = algorithm.buildGraph();
+  TranslationRecovery algorithm;
+  const auto graph = algorithm.buildGraph(relativeTranslations);
   EXPECT_LONGS_EQUAL(3, graph.size());
 
   // Run translation recovery
   const double scale = 2.0;
-  const auto result = algorithm.run(/*betweenTranslations=*/{}, scale);
+  const auto result = algorithm.run(relativeTranslations, scale);
 
   // Check result for first two translations, determined by prior
   EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0)));
@@ -107,12 +107,12 @@ TEST(TranslationRecovery, TwoPoseTest) {
                                   unitTranslation.measured()));
   }
 
-  TranslationRecovery algorithm(relativeTranslations);
-  const auto graph = algorithm.buildGraph();
+  TranslationRecovery algorithm;
+  const auto graph = algorithm.buildGraph(relativeTranslations);
   EXPECT_LONGS_EQUAL(1, graph.size());
 
   // Run translation recovery
-  const auto result = algorithm.run(/*betweenTranslations=*/{}, /*scale=*/3.0);
+  const auto result = algorithm.run(relativeTranslations, /*scale=*/3.0);
 
   // Check result for first two translations, determined by prior
   EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-8));
@@ -145,11 +145,11 @@ TEST(TranslationRecovery, ThreePoseTest) {
                                   unitTranslation.measured()));
   }
 
-  TranslationRecovery algorithm(relativeTranslations);
-  const auto graph = algorithm.buildGraph();
+  TranslationRecovery algorithm;
+  const auto graph = algorithm.buildGraph(relativeTranslations);
   EXPECT_LONGS_EQUAL(3, graph.size());
 
-  const auto result = algorithm.run(/*betweenTranslations=*/{}, /*scale=*/3.0);
+  const auto result = algorithm.run(relativeTranslations, /*scale=*/3.0);
 
   // Check result
   EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-8));
@@ -180,13 +180,9 @@ TEST(TranslationRecovery, ThreePosesIncludingZeroTranslation) {
                                   unitTranslation.measured()));
   }
 
-  TranslationRecovery algorithm(relativeTranslations);
-  const auto graph = algorithm.buildGraph();
-  // There is only 1 non-zero translation edge.
-  EXPECT_LONGS_EQUAL(1, graph.size());
-
+  TranslationRecovery algorithm;
   // Run translation recovery
-  const auto result = algorithm.run(/*betweenTranslations=*/{}, /*scale=*/3.0);
+  const auto result = algorithm.run(relativeTranslations, /*scale=*/3.0);
 
   // Check result
   EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-8));
@@ -222,12 +218,10 @@ TEST(TranslationRecovery, FourPosesIncludingZeroTranslation) {
                                   unitTranslation.measured()));
   }
 
-  TranslationRecovery algorithm(relativeTranslations);
-  const auto graph = algorithm.buildGraph();
-  EXPECT_LONGS_EQUAL(3, graph.size());
+  TranslationRecovery algorithm;
 
   // Run translation recovery
-  const auto result = algorithm.run(/*betweenTranslations=*/{}, /*scale=*/4.0);
+  const auto result = algorithm.run(relativeTranslations, /*scale=*/4.0);
 
   // Check result
   EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-8));
@@ -251,13 +245,10 @@ TEST(TranslationRecovery, ThreePosesWithZeroTranslation) {
                                   unitTranslation.measured()));
   }
 
-  TranslationRecovery algorithm(relativeTranslations);
-  const auto graph = algorithm.buildGraph();
-  // Graph size will be zero as there no 'non-zero distance' edges.
-  EXPECT_LONGS_EQUAL(0, graph.size());
+  TranslationRecovery algorithm;
 
   // Run translation recovery
-  const auto result = algorithm.run(/*betweenTranslations=*/{}, /*scale=*/4.0);
+  const auto result = algorithm.run(relativeTranslations, /*scale=*/4.0);
 
   // Check result
   EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-8));
@@ -289,8 +280,9 @@ TEST(TranslationRecovery, ThreePosesWithOneSoftConstraint) {
   betweenTranslations.emplace_back(0, 3, Point3(1, -1, 0),
                                    noiseModel::Isotropic::Sigma(3, 1e-2));
 
-  TranslationRecovery algorithm(relativeTranslations);
-  auto result = algorithm.run(betweenTranslations);
+  TranslationRecovery algorithm;
+  auto result =
+      algorithm.run(relativeTranslations, /*scale=*/0.0, betweenTranslations);
 
   // Check result
   EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-4));
@@ -322,8 +314,9 @@ TEST(TranslationRecovery, ThreePosesWithOneHardConstraint) {
   betweenTranslations.emplace_back(0, 1, Point3(2, 0, 0),
                                    noiseModel::Constrained::All(3, 1e2));
 
-  TranslationRecovery algorithm(relativeTranslations);
-  auto result = algorithm.run(betweenTranslations);
+  TranslationRecovery algorithm;
+  auto result =
+      algorithm.run(relativeTranslations, /*scale=*/0.0, betweenTranslations);
 
   // Check result
   EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-4));