all tests done!
parent 3ac97c3dbe  commit 5274abafd0
@@ -75,27 +75,26 @@ class GncOptimizer {
     }

     // check that known inliers and outliers make sense:
-    std::vector<size_t> incorrectlySpecifiedWeights; // measurements the user has incorrectly specified
+    std::vector<size_t> inconsistentlySpecifiedWeights; // measurements the user has incorrectly specified
     // to be BOTH known inliers and known outliers
     std::set_intersection(params.knownInliers.begin(),params.knownInliers.end(),
-                          params.knownOutliers.begin(),
-                          params.knownOutliers.end(),
-                          std::inserter(incorrectlySpecifiedWeights, incorrectlySpecifiedWeights.begin()));
-    if(incorrectlySpecifiedWeights.size() > 0){
+                          params.knownOutliers.begin(),params.knownOutliers.end(),
+                          std::inserter(inconsistentlySpecifiedWeights, inconsistentlySpecifiedWeights.begin()));
+    if(inconsistentlySpecifiedWeights.size() > 0){ // if we have inconsistently specified weights, we throw an exception
       params.print("params\n");
       throw std::runtime_error("GncOptimizer::constructor: the user has selected one or more measurements"
-          " to be both a known inlier and a known outlier.");
+          " to be BOTH a known inlier and a known outlier.");
     }
     // check that known inliers are in the graph
     for (size_t i = 0; i < params.knownInliers.size(); i++){
-      if( params.knownInliers[i] > nfg_.size()-1 ){
+      if( params.knownInliers[i] > nfg_.size()-1 ){ // outside graph
         throw std::runtime_error("GncOptimizer::constructor: the user has selected one or more measurements"
             "that are not in the factor graph to be known inliers.");
       }
     }
     // check that known outliers are in the graph
     for (size_t i = 0; i < params.knownOutliers.size(); i++){
-      if( params.knownOutliers[i] > nfg_.size()-1 ){
+      if( params.knownOutliers[i] > nfg_.size()-1 ){ // outside graph
         throw std::runtime_error("GncOptimizer::constructor: the user has selected one or more measurements"
             "that are not in the factor graph to be known outliers.");
       }
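Side note on the consistency check above: std::set_intersection assumes both index ranges are already sorted, so the overlap test is only reliable when knownInliers and knownOutliers are given in ascending order. A minimal standalone sketch (illustrative only, not part of the patch) that sorts copies first:

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <iterator>
#include <vector>

int main() {
  std::vector<std::size_t> knownInliers{2, 0, 1};  // hypothetical user input
  std::vector<std::size_t> knownOutliers{3, 2};    // index 2 appears in both lists

  // sort copies so the sorted-range precondition of std::set_intersection holds
  std::sort(knownInliers.begin(), knownInliers.end());
  std::sort(knownOutliers.begin(), knownOutliers.end());

  std::vector<std::size_t> overlap;
  std::set_intersection(knownInliers.begin(), knownInliers.end(),
                        knownOutliers.begin(), knownOutliers.end(),
                        std::back_inserter(overlap));

  if (!overlap.empty())
    std::cout << "measurement " << overlap[0]
              << " is specified as both inlier and outlier\n";
}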
@@ -161,7 +160,7 @@ class GncOptimizer {
   Vector initializeWeightsFromKnownInliersAndOutliers() const{
     Vector weights = Vector::Ones(nfg_.size());
     for (size_t i = 0; i < params_.knownOutliers.size(); i++){
-      weights[ params_.knownOutliers[i] ] = 0.0;
+      weights[ params_.knownOutliers[i] ] = 0.0; // known to be outliers
     }
     return weights;
   }
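For reference, the intended behaviour of the weight initialization above, as a standalone sketch (hypothetical free function, not the GTSAM API): every weight starts at 1 and only the indices listed as known outliers are forced to 0; known inliers already carry weight 1.

#include <cstddef>
#include <vector>

// Hypothetical helper mirroring the initialization above.
std::vector<double> initializeWeights(std::size_t numFactors,
                                      const std::vector<std::size_t>& knownOutliers) {
  std::vector<double> weights(numFactors, 1.0);             // assume inlier everywhere
  for (std::size_t idx : knownOutliers) weights[idx] = 0.0; // zero out known outliers
  return weights;
}

// e.g. initializeWeights(4, {3}) yields {1, 1, 1, 0}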
@@ -170,10 +169,11 @@ class GncOptimizer {
   Values optimize() {
     // start by assuming all measurements are inliers
     weights_ = initializeWeightsFromKnownInliersAndOutliers();
-    BaseOptimizer baseOptimizer(nfg_, state_);
+    NonlinearFactorGraph graph_initial = this->makeWeightedGraph(weights_);
+    BaseOptimizer baseOptimizer(graph_initial, state_);
     Values result = baseOptimizer.optimize();
     double mu = initializeMu();
-    double prev_cost = nfg_.error(result);
+    double prev_cost = graph_initial.error(result);
     double cost = 0.0; // this will be updated in the main loop

     // handle the degenerate case that corresponds to small
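The change above makes the very first Gauss-Newton solve, and prev_cost, use the weighted graph instead of the raw one, so factors whose weight is already 0 (the known outliers) cannot bias the initial estimate. A conceptual sketch of what a weighted cost means here (illustrative only; in GTSAM makeWeightedGraph folds the weights into the factors themselves):

#include <cstddef>
#include <vector>

struct FactorError { double squaredError; };  // hypothetical per-factor residual

// Sum of per-factor squared errors, each scaled by its GNC weight:
// a weight of 0 removes the factor from the cost entirely.
double weightedCost(const std::vector<FactorError>& factors,
                    const std::vector<double>& weights) {
  double cost = 0.0;
  for (std::size_t k = 0; k < factors.size(); ++k)
    cost += weights[k] * factors[k].squaredError;
  return cost;
}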
@@ -181,8 +181,8 @@ class GncOptimizer {
     // For GM: if residual error is small, mu -> 0
     // For TLS: if residual error is small, mu -> -1
     int nrUnknownInOrOut = nfg_.size() - ( params_.knownInliers.size() + params_.knownOutliers.size() );
-    // ^^ number of measurements that are not known to be inliers or outliers
-    if (mu <= 0 || nrUnknownInOrOut == 0) {
+    // ^^ number of measurements that are not known to be inliers or outliers (GNC will need to figure them out)
+    if (mu <= 0 || nrUnknownInOrOut == 0) { // no need to even call GNC in this case
       if (mu <= 0 && params_.verbosity >= GncParameters::Verbosity::SUMMARY) {
         std::cout << "GNC Optimizer stopped because maximum residual at "
                      "initialization is small."
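The early exit above covers the case where the user has already labelled every measurement: nrUnknownInOrOut is then zero, nothing is left for GNC to classify, and the single weighted solve is returned directly. A small sketch of the same condition (hypothetical helper mirroring the logic, not the API):

#include <cstddef>

// Skip the GNC iterations when mu is already degenerate or when every
// measurement is user-labelled as a known inlier or a known outlier.
bool canSkipGnc(double mu, std::size_t numFactors,
                std::size_t numKnownInliers, std::size_t numKnownOutliers) {
  const std::size_t nrUnknownInOrOut =
      numFactors - (numKnownInliers + numKnownOutliers);
  return mu <= 0 || nrUnknownInOrOut == 0;
}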
@@ -397,21 +397,21 @@ class GncOptimizer {
     for (size_t k = 0; k < nfg_.size(); k++) {
       allWeights.push_back(k);
     }
+    std::vector<size_t> knownWeights;
+    std::set_union(params_.knownInliers.begin(), params_.knownInliers.end(),
+                   params_.knownOutliers.begin(), params_.knownOutliers.end(),
+                   std::inserter(knownWeights, knownWeights.begin()));
+
     std::vector<size_t> unknownWeights;
     std::set_difference(allWeights.begin(), allWeights.end(),
-                        params_.knownInliers.begin(),
-                        params_.knownInliers.end(),
+                        knownWeights.begin(), knownWeights.end(),
                         std::inserter(unknownWeights, unknownWeights.begin()));
-    std::set_difference(unknownWeights.begin(), unknownWeights.end(),
-                        params_.knownOutliers.begin(),
-                        params_.knownOutliers.end(),
-                        std::inserter(unknownWeights, unknownWeights.begin()));

     // update weights of known inlier/outlier measurements
     switch (params_.lossType) {
       case GncLossType::GM: { // use eq (12) in GNC paper
         for (size_t k : unknownWeights) {
           if (nfg_[k]) {
             double u2_k = nfg_[k]->error(currentEstimate); // squared (and whitened) residual
             weights[k] = std::pow(
                 (mu * barcSq_[k]) / (u2_k + mu * barcSq_[k]), 2);
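The set algebra above computes the indices GNC still has to classify: all factor indices minus the union of the user-specified inliers and outliers; the single set_difference against that union replaces the earlier pair of set_difference calls. A standalone sketch of the same bookkeeping, assuming sorted inputs as the std:: set algorithms require:

#include <algorithm>
#include <cstddef>
#include <iterator>
#include <vector>

std::vector<std::size_t> unknownIndices(std::size_t numFactors,
                                        std::vector<std::size_t> knownInliers,
                                        std::vector<std::size_t> knownOutliers) {
  std::vector<std::size_t> all(numFactors);
  for (std::size_t k = 0; k < numFactors; ++k) all[k] = k;

  std::sort(knownInliers.begin(), knownInliers.end());
  std::sort(knownOutliers.begin(), knownOutliers.end());

  // union of everything the user has already labelled
  std::vector<std::size_t> known;
  std::set_union(knownInliers.begin(), knownInliers.end(),
                 knownOutliers.begin(), knownOutliers.end(),
                 std::back_inserter(known));

  // what remains is what GNC must classify
  std::vector<std::size_t> unknown;
  std::set_difference(all.begin(), all.end(), known.begin(), known.end(),
                      std::back_inserter(unknown));
  return unknown;
}

// e.g. unknownIndices(4, {0, 1}, {3}) yields {2}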
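For the GM branch above (eq. (12) in the GNC paper), the per-measurement weight is ((mu * barcSq_k) / (u2_k + mu * barcSq_k))^2, where u2_k is the whitened squared residual and barcSq_k the inlier threshold. A minimal sketch of that formula in isolation:

#include <cmath>

// GM weight from eq. (12): residuals well below the threshold give a weight
// close to 1, large residuals drive the weight towards 0.
double gmWeight(double u2_k, double mu, double barcSq) {
  return std::pow((mu * barcSq) / (u2_k + mu * barcSq), 2);
}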
@@ -757,69 +757,86 @@ TEST(GncOptimizer, knownInliersAndOutliers) {
   Values initial;
   initial.insert(X(1), p0);

-  std::vector<size_t> knownInliers;
-  knownInliers.push_back(0);
-  knownInliers.push_back(1);
-  knownInliers.push_back(2);
+  // nonconvexity with known inliers and known outliers (check early stopping
+  // when all measurements are known to be inliers or outliers)
+  {
+    std::vector<size_t> knownInliers;
+    knownInliers.push_back(0);
+    knownInliers.push_back(1);
+    knownInliers.push_back(2);

-  fg.print(" \n ");
+    std::vector<size_t> knownOutliers;
+    knownOutliers.push_back(3);

-//  // nonconvexity with known inliers
-//  {
-//    GncParams<GaussNewtonParams> gncParams;
-//    gncParams.setKnownInliers(knownInliers);
-//    gncParams.setLossType(GncLossType::GM);
-//    //gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::SUMMARY);
-//    auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial,
-//                                                          gncParams);
-//    gnc.setInlierCostThresholds(1.0);
-//    Values gnc_result = gnc.optimize();
-//    CHECK(assert_equal(Point2(0.0, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
-//
-//    // check weights were actually fixed:
-//    Vector finalWeights = gnc.getWeights();
-//    DOUBLES_EQUAL(1.0, finalWeights[0], tol);
-//    DOUBLES_EQUAL(1.0, finalWeights[1], tol);
-//    DOUBLES_EQUAL(1.0, finalWeights[2], tol);
-//  }
-//  {
-//    GncParams<GaussNewtonParams> gncParams;
-//    gncParams.setKnownInliers(knownInliers);
-//    gncParams.setLossType(GncLossType::TLS);
-//    // gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::SUMMARY);
-//    auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial,
-//                                                          gncParams);
-//
-//    Values gnc_result = gnc.optimize();
-//    CHECK(assert_equal(Point2(0.0, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
-//
-//    // check weights were actually fixed:
-//    Vector finalWeights = gnc.getWeights();
-//    DOUBLES_EQUAL(1.0, finalWeights[0], tol);
-//    DOUBLES_EQUAL(1.0, finalWeights[1], tol);
-//    DOUBLES_EQUAL(1.0, finalWeights[2], tol);
-//    DOUBLES_EQUAL(0.0, finalWeights[3], tol);
-//  }
-//  {
-//    // if we set the threshold large, they are all inliers
-//    GncParams<GaussNewtonParams> gncParams;
-//    gncParams.setKnownInliers(knownInliers);
-//    gncParams.setLossType(GncLossType::TLS);
-//    //gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::VALUES);
-//    auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial,
-//                                                          gncParams);
-//    gnc.setInlierCostThresholds(100.0);
-//
-//    Values gnc_result = gnc.optimize();
-//    CHECK(assert_equal(Point2(0.25, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
-//
-//    // check weights were actually fixed:
-//    Vector finalWeights = gnc.getWeights();
-//    DOUBLES_EQUAL(1.0, finalWeights[0], tol);
-//    DOUBLES_EQUAL(1.0, finalWeights[1], tol);
-//    DOUBLES_EQUAL(1.0, finalWeights[2], tol);
-//    DOUBLES_EQUAL(1.0, finalWeights[3], tol);
-//  }
+    GncParams<GaussNewtonParams> gncParams;
+    gncParams.setKnownInliers(knownInliers);
+    gncParams.setKnownOutliers(knownOutliers);
+    gncParams.setLossType(GncLossType::GM);
+    //gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::SUMMARY);
+    auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial,
+                                                          gncParams);
+    gnc.setInlierCostThresholds(1.0);
+    Values gnc_result = gnc.optimize();
+    CHECK(assert_equal(Point2(0.0, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
+
+    // check weights were actually fixed:
+    Vector finalWeights = gnc.getWeights();
+    DOUBLES_EQUAL(1.0, finalWeights[0], tol);
+    DOUBLES_EQUAL(1.0, finalWeights[1], tol);
+    DOUBLES_EQUAL(1.0, finalWeights[2], tol);
+    DOUBLES_EQUAL(0.0, finalWeights[3], tol);
+  }
+
+  // nonconvexity with known inliers and known outliers
+  {
+    std::vector<size_t> knownInliers;
+    knownInliers.push_back(2);
+    knownInliers.push_back(0);
+
+    std::vector<size_t> knownOutliers;
+    knownOutliers.push_back(3);
+
+    GncParams<GaussNewtonParams> gncParams;
+    gncParams.setKnownInliers(knownInliers);
+    gncParams.setKnownOutliers(knownOutliers);
+    gncParams.setLossType(GncLossType::GM);
+    //gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::SUMMARY);
+    auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial,
+                                                          gncParams);
+    gnc.setInlierCostThresholds(1.0);
+    Values gnc_result = gnc.optimize();
+    CHECK(assert_equal(Point2(0.0, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
+
+    // check weights were actually fixed:
+    Vector finalWeights = gnc.getWeights();
+    DOUBLES_EQUAL(1.0, finalWeights[0], tol);
+    DOUBLES_EQUAL(1.0, finalWeights[1], 1e-5);
+    DOUBLES_EQUAL(1.0, finalWeights[2], tol);
+    DOUBLES_EQUAL(0.0, finalWeights[3], tol);
+  }
+
+  // only known outliers
+  {
+    std::vector<size_t> knownOutliers;
+    knownOutliers.push_back(3);
+
+    GncParams<GaussNewtonParams> gncParams;
+    gncParams.setKnownOutliers(knownOutliers);
+    gncParams.setLossType(GncLossType::GM);
+    //gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::SUMMARY);
+    auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial,
+                                                          gncParams);
+    gnc.setInlierCostThresholds(1.0);
+    Values gnc_result = gnc.optimize();
+    CHECK(assert_equal(Point2(0.0, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
+
+    // check weights were actually fixed:
+    Vector finalWeights = gnc.getWeights();
+    DOUBLES_EQUAL(1.0, finalWeights[0], tol);
+    DOUBLES_EQUAL(1.0, finalWeights[1], 1e-5);
+    DOUBLES_EQUAL(1.0, finalWeights[2], tol);
+    DOUBLES_EQUAL(0.0, finalWeights[3], tol);
+  }
 }

 /* ************************************************************************* */