all tests done!
parent 3ac97c3dbe
commit 5274abafd0
@@ -75,27 +75,26 @@ class GncOptimizer {
   }
 
   // check that known inliers and outliers make sense:
-  std::vector<size_t> incorrectlySpecifiedWeights; // measurements the user has incorrectly specified
+  std::vector<size_t> inconsistentlySpecifiedWeights; // measurements the user has incorrectly specified
   // to be BOTH known inliers and known outliers
   std::set_intersection(params.knownInliers.begin(),params.knownInliers.end(),
-                        params.knownOutliers.begin(),
-                        params.knownOutliers.end(),
-                        std::inserter(incorrectlySpecifiedWeights, incorrectlySpecifiedWeights.begin()));
-  if(incorrectlySpecifiedWeights.size() > 0){
+                        params.knownOutliers.begin(),params.knownOutliers.end(),
+                        std::inserter(inconsistentlySpecifiedWeights, inconsistentlySpecifiedWeights.begin()));
+  if(inconsistentlySpecifiedWeights.size() > 0){ // if we have inconsistently specified weights, we throw an exception
     params.print("params\n");
     throw std::runtime_error("GncOptimizer::constructor: the user has selected one or more measurements"
-        " to be both a known inlier and a known outlier.");
+        " to be BOTH a known inlier and a known outlier.");
   }
   // check that known inliers are in the graph
   for (size_t i = 0; i < params.knownInliers.size(); i++){
-    if( params.knownInliers[i] > nfg_.size()-1 ){
+    if( params.knownInliers[i] > nfg_.size()-1 ){ // outside graph
      throw std::runtime_error("GncOptimizer::constructor: the user has selected one or more measurements"
          "that are not in the factor graph to be known inliers.");
     }
   }
   // check that known outliers are in the graph
   for (size_t i = 0; i < params.knownOutliers.size(); i++){
-    if( params.knownOutliers[i] > nfg_.size()-1 ){
+    if( params.knownOutliers[i] > nfg_.size()-1 ){ // outside graph
      throw std::runtime_error("GncOptimizer::constructor: the user has selected one or more measurements"
          "that are not in the factor graph to be known outliers.");
     }
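Note: the constructor check added above is just a sorted-range intersection between the two user-provided index lists. A minimal standalone sketch of the same idea (hypothetical indices; std::set_intersection requires both vectors to be sorted, which the check above implicitly assumes as well):

// Illustrative sketch only, not part of the patch.
#include <algorithm>
#include <iostream>
#include <iterator>
#include <vector>

int main() {
  std::vector<size_t> knownInliers = {0, 1, 2};  // hypothetical indices
  std::vector<size_t> knownOutliers = {2, 3};    // index 2 is listed on both sides

  std::vector<size_t> inconsistentlySpecifiedWeights;
  std::set_intersection(knownInliers.begin(), knownInliers.end(),
                        knownOutliers.begin(), knownOutliers.end(),
                        std::back_inserter(inconsistentlySpecifiedWeights));

  if (!inconsistentlySpecifiedWeights.empty()) {
    // The constructor in the patch throws here; the sketch just prints.
    std::cout << "measurement " << inconsistentlySpecifiedWeights[0]
              << " is specified as both a known inlier and a known outlier\n";
  }
  return 0;
}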
@@ -161,7 +160,7 @@ class GncOptimizer {
   Vector initializeWeightsFromKnownInliersAndOutliers() const{
     Vector weights = Vector::Ones(nfg_.size());
     for (size_t i = 0; i < params_.knownOutliers.size(); i++){
-      weights[ params_.knownOutliers[i] ] = 0.0;
+      weights[ params_.knownOutliers[i] ] = 0.0; // known to be outliers
     }
     return weights;
   }
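Note: initializeWeightsFromKnownInliersAndOutliers simply starts from all ones and zeroes the entries the user marked as known outliers. A tiny standalone sketch, assuming gtsam::Vector is Eigen's VectorXd and using made-up sizes and indices:

// Illustrative sketch only, not part of the patch.
#include <Eigen/Dense>
#include <iostream>
#include <vector>

int main() {
  const size_t nrFactors = 4;               // hypothetical graph size
  std::vector<size_t> knownOutliers = {3};  // hypothetical outlier index

  Eigen::VectorXd weights = Eigen::VectorXd::Ones(nrFactors);
  for (size_t i = 0; i < knownOutliers.size(); i++)
    weights[knownOutliers[i]] = 0.0;        // known outliers start at weight 0

  std::cout << weights.transpose() << "\n"; // prints: 1 1 1 0
  return 0;
}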
@@ -170,10 +169,11 @@ class GncOptimizer {
   Values optimize() {
     // start by assuming all measurements are inliers
     weights_ = initializeWeightsFromKnownInliersAndOutliers();
-    BaseOptimizer baseOptimizer(nfg_, state_);
+    NonlinearFactorGraph graph_initial = this->makeWeightedGraph(weights_);
+    BaseOptimizer baseOptimizer(graph_initial, state_);
     Values result = baseOptimizer.optimize();
     double mu = initializeMu();
-    double prev_cost = nfg_.error(result);
+    double prev_cost = graph_initial.error(result);
     double cost = 0.0; // this will be updated in the main loop
 
     // handle the degenerate case that corresponds to small
@@ -181,8 +181,8 @@ class GncOptimizer {
     // For GM: if residual error is small, mu -> 0
     // For TLS: if residual error is small, mu -> -1
     int nrUnknownInOrOut = nfg_.size() - ( params_.knownInliers.size() + params_.knownOutliers.size() );
-    // ^^ number of measurements that are not known to be inliers or outliers
-    if (mu <= 0 || nrUnknownInOrOut == 0) {
+    // ^^ number of measurements that are not known to be inliers or outliers (GNC will need to figure them out)
+    if (mu <= 0 || nrUnknownInOrOut == 0) { // no need to even call GNC in this case
       if (mu <= 0 && params_.verbosity >= GncParameters::Verbosity::SUMMARY) {
         std::cout << "GNC Optimizer stopped because maximum residual at "
                      "initialization is small."
@@ -397,21 +397,21 @@ class GncOptimizer {
     for (size_t k = 0; k < nfg_.size(); k++) {
       allWeights.push_back(k);
     }
+    std::vector<size_t> knownWeights;
+    std::set_union(params_.knownInliers.begin(), params_.knownInliers.end(),
+                   params_.knownOutliers.begin(), params_.knownOutliers.end(),
+                   std::inserter(knownWeights, knownWeights.begin()));
+
     std::vector<size_t> unknownWeights;
     std::set_difference(allWeights.begin(), allWeights.end(),
-                        params_.knownInliers.begin(),
-                        params_.knownInliers.end(),
+                        knownWeights.begin(), knownWeights.end(),
                         std::inserter(unknownWeights, unknownWeights.begin()));
-    std::set_difference(unknownWeights.begin(), unknownWeights.end(),
-                        params_.knownOutliers.begin(),
-                        params_.knownOutliers.end(),
-                        std::inserter(unknownWeights, unknownWeights.begin()));
 
     // update weights of known inlier/outlier measurements
     switch (params_.lossType) {
       case GncLossType::GM: { // use eq (12) in GNC paper
         for (size_t k : unknownWeights) {
           if (nfg_[k]) {
             double u2_k = nfg_[k]->error(currentEstimate); // squared (and whitened) residual
             weights[k] = std::pow(
                 (mu * barcSq_[k]) / (u2_k + mu * barcSq_[k]), 2);
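Note: the rewritten weight update first collects all user-labeled indices with std::set_union and then removes them from the full index range with a single std::set_difference, so only the unlabeled measurements are re-weighted (via eq. (12) of the GNC paper for the GM loss). A self-contained sketch of that index bookkeeping, with hypothetical sizes and indices and assuming the known-index vectors are sorted:

// Illustrative sketch only, not part of the patch.
#include <algorithm>
#include <iostream>
#include <iterator>
#include <vector>

int main() {
  const size_t nrFactors = 6;                 // hypothetical graph size
  std::vector<size_t> knownInliers = {0, 1};  // hypothetical indices
  std::vector<size_t> knownOutliers = {5};

  std::vector<size_t> allWeights;
  for (size_t k = 0; k < nrFactors; k++) allWeights.push_back(k);

  // indices whose weights are fixed by the user (inliers OR outliers)
  std::vector<size_t> knownWeights;
  std::set_union(knownInliers.begin(), knownInliers.end(),
                 knownOutliers.begin(), knownOutliers.end(),
                 std::back_inserter(knownWeights));

  // indices GNC still has to classify
  std::vector<size_t> unknownWeights;
  std::set_difference(allWeights.begin(), allWeights.end(),
                      knownWeights.begin(), knownWeights.end(),
                      std::back_inserter(unknownWeights));

  for (size_t k : unknownWeights) std::cout << k << " ";  // prints: 2 3 4
  std::cout << "\n";
  return 0;
}

Computing a single difference against the unioned index set also avoids the previous pattern of using unknownWeights as both the input range and the insertion target of the second std::set_difference.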
@@ -757,69 +757,86 @@ TEST(GncOptimizer, knownInliersAndOutliers) {
   Values initial;
   initial.insert(X(1), p0);
 
-  std::vector<size_t> knownInliers;
-  knownInliers.push_back(0);
-  knownInliers.push_back(1);
-  knownInliers.push_back(2);
-
-  fg.print(" \n ");
-
-  // // nonconvexity with known inliers
-  // {
-  // GncParams<GaussNewtonParams> gncParams;
-  // gncParams.setKnownInliers(knownInliers);
-  // gncParams.setLossType(GncLossType::GM);
-  // //gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::SUMMARY);
-  // auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial,
-  // gncParams);
-  // gnc.setInlierCostThresholds(1.0);
-  // Values gnc_result = gnc.optimize();
-  // CHECK(assert_equal(Point2(0.0, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
-  //
-  // // check weights were actually fixed:
-  // Vector finalWeights = gnc.getWeights();
-  // DOUBLES_EQUAL(1.0, finalWeights[0], tol);
-  // DOUBLES_EQUAL(1.0, finalWeights[1], tol);
-  // DOUBLES_EQUAL(1.0, finalWeights[2], tol);
-  // }
-  // {
-  // GncParams<GaussNewtonParams> gncParams;
-  // gncParams.setKnownInliers(knownInliers);
-  // gncParams.setLossType(GncLossType::TLS);
-  // // gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::SUMMARY);
-  // auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial,
-  // gncParams);
-  //
-  // Values gnc_result = gnc.optimize();
-  // CHECK(assert_equal(Point2(0.0, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
-  //
-  // // // check weights were actually fixed:
-  // Vector finalWeights = gnc.getWeights();
-  // DOUBLES_EQUAL(1.0, finalWeights[0], tol);
-  // DOUBLES_EQUAL(1.0, finalWeights[1], tol);
-  // DOUBLES_EQUAL(1.0, finalWeights[2], tol);
-  // DOUBLES_EQUAL(0.0, finalWeights[3], tol);
-  // }
-  // {
-  // // if we set the threshold large, they are all inliers
-  // GncParams<GaussNewtonParams> gncParams;
-  // gncParams.setKnownInliers(knownInliers);
-  // gncParams.setLossType(GncLossType::TLS);
-  // //gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::VALUES);
-  // auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial,
-  // gncParams);
-  // gnc.setInlierCostThresholds(100.0);
-  //
-  // Values gnc_result = gnc.optimize();
-  // CHECK(assert_equal(Point2(0.25, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
-  //
-  // // // check weights were actually fixed:
-  // Vector finalWeights = gnc.getWeights();
-  // DOUBLES_EQUAL(1.0, finalWeights[0], tol);
-  // DOUBLES_EQUAL(1.0, finalWeights[1], tol);
-  // DOUBLES_EQUAL(1.0, finalWeights[2], tol);
-  // DOUBLES_EQUAL(1.0, finalWeights[3], tol);
-  // }
+  // nonconvexity with known inliers and known outliers (check early stopping
+  // when all measurements are known to be inliers or outliers)
+  {
+    std::vector<size_t> knownInliers;
+    knownInliers.push_back(0);
+    knownInliers.push_back(1);
+    knownInliers.push_back(2);
+
+    std::vector<size_t> knownOutliers;
+    knownOutliers.push_back(3);
+
+    GncParams<GaussNewtonParams> gncParams;
+    gncParams.setKnownInliers(knownInliers);
+    gncParams.setKnownOutliers(knownOutliers);
+    gncParams.setLossType(GncLossType::GM);
+    //gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::SUMMARY);
+    auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial,
+                                                          gncParams);
+    gnc.setInlierCostThresholds(1.0);
+    Values gnc_result = gnc.optimize();
+    CHECK(assert_equal(Point2(0.0, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
+
+    // check weights were actually fixed:
+    Vector finalWeights = gnc.getWeights();
+    DOUBLES_EQUAL(1.0, finalWeights[0], tol);
+    DOUBLES_EQUAL(1.0, finalWeights[1], tol);
+    DOUBLES_EQUAL(1.0, finalWeights[2], tol);
+    DOUBLES_EQUAL(0.0, finalWeights[3], tol);
+  }
+
+  // nonconvexity with known inliers and known outliers
+  {
+    std::vector<size_t> knownInliers;
+    knownInliers.push_back(2);
+    knownInliers.push_back(0);
+
+    std::vector<size_t> knownOutliers;
+    knownOutliers.push_back(3);
+
+    GncParams<GaussNewtonParams> gncParams;
+    gncParams.setKnownInliers(knownInliers);
+    gncParams.setKnownOutliers(knownOutliers);
+    gncParams.setLossType(GncLossType::GM);
+    //gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::SUMMARY);
+    auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial,
+                                                          gncParams);
+    gnc.setInlierCostThresholds(1.0);
+    Values gnc_result = gnc.optimize();
+    CHECK(assert_equal(Point2(0.0, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
+
+    // check weights were actually fixed:
+    Vector finalWeights = gnc.getWeights();
+    DOUBLES_EQUAL(1.0, finalWeights[0], tol);
+    DOUBLES_EQUAL(1.0, finalWeights[1], 1e-5);
+    DOUBLES_EQUAL(1.0, finalWeights[2], tol);
+    DOUBLES_EQUAL(0.0, finalWeights[3], tol);
+  }
+
+  // only known outliers
+  {
+    std::vector<size_t> knownOutliers;
+    knownOutliers.push_back(3);
+
+    GncParams<GaussNewtonParams> gncParams;
+    gncParams.setKnownOutliers(knownOutliers);
+    gncParams.setLossType(GncLossType::GM);
+    //gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::SUMMARY);
+    auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial,
+                                                          gncParams);
+    gnc.setInlierCostThresholds(1.0);
+    Values gnc_result = gnc.optimize();
+    CHECK(assert_equal(Point2(0.0, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
+
+    // check weights were actually fixed:
+    Vector finalWeights = gnc.getWeights();
+    DOUBLES_EQUAL(1.0, finalWeights[0], tol);
+    DOUBLES_EQUAL(1.0, finalWeights[1], 1e-5);
+    DOUBLES_EQUAL(1.0, finalWeights[2], tol);
+    DOUBLES_EQUAL(0.0, finalWeights[3], tol);
+  }
 }
 
 /* ************************************************************************* */