adding knownOutlier input to GNC

release/4.3a0
lcarlone 2021-05-10 10:30:32 -04:00
parent 7342438fb3
commit 3ac97c3dbe
3 changed files with 140 additions and 6 deletions
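In short, the commit adds a knownOutliers input to GNC, mirroring the existing knownInliers one: the listed factor-graph slots are assigned weight zero and are excluded from the GNC weight updates. A minimal usage sketch, assembled from the API touched in this diff; the helper function name, the factor graph fg, and the initial Values are placeholders rather than part of the commit:

#include <gtsam/nonlinear/GaussNewtonOptimizer.h>
#include <gtsam/nonlinear/GncOptimizer.h>
#include <iostream>

using namespace gtsam;

// Hypothetical helper: run GNC while pinning slots 0-2 as inliers and slot 3 as an outlier.
Values runGncExample(const NonlinearFactorGraph& fg, const Values& initial) {
  GncParams<GaussNewtonParams> gncParams;
  gncParams.setKnownInliers({0, 1, 2});  // weights of these slots stay at 1
  gncParams.setKnownOutliers({3});       // weight of this slot is fixed to 0
  auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial, gncParams);
  Values result = gnc.optimize();
  // slots 0-3 are never touched by the GNC weight updates
  std::cout << "final weights: " << gnc.getWeights().transpose() << std::endl;
  return result;
}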


@@ -74,6 +74,32 @@ class GncOptimizer {
}
}
// check that known inliers and outliers make sense:
std::vector<size_t> incorrectlySpecifiedWeights; // measurements the user has incorrectly specified
// to be BOTH known inliers and known outliers
std::set_intersection(params.knownInliers.begin(),params.knownInliers.end(),
params.knownOutliers.begin(),
params.knownOutliers.end(),
std::inserter(incorrectlySpecifiedWeights, incorrectlySpecifiedWeights.begin()));
if(incorrectlySpecifiedWeights.size() > 0){
params.print("params\n");
throw std::runtime_error("GncOptimizer::constructor: the user has selected one or more measurements"
" to be both a known inlier and a known outlier.");
}
// check that known inliers are in the graph
for (size_t i = 0; i < params.knownInliers.size(); i++){
if (params.knownInliers[i] >= nfg_.size()) {
throw std::runtime_error("GncOptimizer::constructor: the user has selected one or more measurements"
" that are not in the factor graph to be known inliers.");
}
}
// check that known outliers are in the graph
for (size_t i = 0; i < params.knownOutliers.size(); i++){
if (params.knownOutliers[i] >= nfg_.size()) {
throw std::runtime_error("GncOptimizer::constructor: the user has selected one or more measurements"
" that are not in the factor graph to be known outliers.");
}
}
// set default barcSq_ (inlier threshold)
double alpha = 0.99; // with this (default) probability, inlier residuals are smaller than barcSq_
setInlierCostThresholdsAtProbability(alpha);
@@ -132,10 +158,18 @@ class GncOptimizer {
&& equal(barcSq_, other.getInlierCostThresholds());
}
/// Initialize the weights: all ones, except the known outliers, which get weight zero.
Vector initializeWeightsFromKnownInliersAndOutliers() const {
Vector weights = Vector::Ones(nfg_.size());
for (size_t i = 0; i < params_.knownOutliers.size(); i++){
weights[ params_.knownOutliers[i] ] = 0.0;
}
return weights;
}
/// Compute optimal solution using graduated non-convexity.
Values optimize() {
// start by assuming all measurements are inliers (known outliers are assigned weight zero)
weights_ = Vector::Ones(nfg_.size());
weights_ = initializeWeightsFromKnownInliersAndOutliers();
BaseOptimizer baseOptimizer(nfg_, state_);
Values result = baseOptimizer.optimize();
double mu = initializeMu();
@@ -146,12 +180,18 @@ class GncOptimizer {
// maximum residual errors at initialization
// For GM: if residual error is small, mu -> 0
// For TLS: if residual error is small, mu -> -1
if (mu <= 0) {
if (params_.verbosity >= GncParameters::Verbosity::SUMMARY) {
int nrUnknownInOrOut = nfg_.size() - ( params_.knownInliers.size() + params_.knownOutliers.size() );
// ^^ number of measurements that are not known to be inliers or outliers
if (mu <= 0 || nrUnknownInOrOut == 0) {
if (mu <= 0 && params_.verbosity >= GncParameters::Verbosity::SUMMARY) {
std::cout << "GNC Optimizer stopped because maximum residual at "
"initialization is small."
<< std::endl;
}
if (nrUnknownInOrOut==0 && params_.verbosity >= GncParameters::Verbosity::SUMMARY) {
std::cout << "GNC Optimizer stopped because all measurements are already known to be inliers or outliers"
<< std::endl;
}
if (params_.verbosity >= GncParameters::Verbosity::VALUES) {
result.print("result\n");
std::cout << "mu: " << mu << std::endl;
@@ -350,7 +390,7 @@ class GncOptimizer {
/// Calculate gnc weights.
Vector calculateWeights(const Values& currentEstimate, const double mu) {
Vector weights = Vector::Ones(nfg_.size());
Vector weights = initializeWeightsFromKnownInliersAndOutliers();
// do not update the weights that the user has fixed as known inliers or known outliers
std::vector<size_t> allWeights;
@@ -362,6 +402,10 @@
params_.knownInliers.begin(),
params_.knownInliers.end(),
std::inserter(unknownWeights, unknownWeights.begin()));
// exclude the known outliers as well; write into a temporary vector, since
// std::set_difference must not insert into the range it is reading from
std::vector<size_t> unknownWeightsTmp;
std::set_difference(unknownWeights.begin(), unknownWeights.end(),
params_.knownOutliers.begin(),
params_.knownOutliers.end(),
std::inserter(unknownWeightsTmp, unknownWeightsTmp.begin()));
unknownWeights = unknownWeightsTmp;
// update the weights of the measurements that are not known inliers/outliers
switch (params_.lossType) {
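
A side note on the index bookkeeping in calculateWeights above: std::set_difference and std::set_union require sorted input ranges, which is why the setKnownInliers/setKnownOutliers setters in the next file sort what they store. A small standalone sketch of the intended computation, with illustrative values that are not taken from the library:

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <iterator>
#include <vector>

int main() {
  // indices of all measurements in a hypothetical 6-factor graph
  std::vector<size_t> all = {0, 1, 2, 3, 4, 5};
  std::vector<size_t> knownInliers = {0, 1};  // must be sorted
  std::vector<size_t> knownOutliers = {5};    // must be sorted

  // union of all indices whose weights are fixed by the user
  std::vector<size_t> known;
  std::set_union(knownInliers.begin(), knownInliers.end(),
                 knownOutliers.begin(), knownOutliers.end(),
                 std::back_inserter(known));

  // indices whose weights GNC still has to estimate: prints "2 3 4"
  std::vector<size_t> unknown;
  std::set_difference(all.begin(), all.end(), known.begin(), known.end(),
                      std::back_inserter(unknown));
  for (size_t k : unknown) std::cout << k << " ";
  std::cout << std::endl;
  return 0;
}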


@@ -71,6 +71,7 @@ class GncParams {
double weightsTol = 1e-4; ///< If the weights are within weightsTol from being binary, stop iterating (only for TLS)
Verbosity verbosity = SILENT; ///< Verbosity level
std::vector<size_t> knownInliers = std::vector<size_t>(); ///< Slots in the factor graph corresponding to measurements that we know are inliers
std::vector<size_t> knownOutliers = std::vector<size_t>(); ///< Slots in the factor graph corresponding to measurements that we know are outliers
/// Set the robust loss function to be used in GNC (chosen among the ones in GncLossType).
void setLossType(const GncLossType type) {
@@ -112,8 +113,21 @@ class GncParams {
* only apply GNC to prune outliers from the loop closures.
* */
void setKnownInliers(const std::vector<size_t>& knownIn) {
for (size_t i = 0; i < knownIn.size(); i++)
for (size_t i = 0; i < knownIn.size(); i++){
knownInliers.push_back(knownIn[i]);
}
std::sort(knownInliers.begin(), knownInliers.end());
}
/** (Optional) Provide a vector of measurements that must be considered outliers. The entries in the vector
* correspond to the slots in the factor graph. For instance, if you have a nonlinear factor graph nfg,
* and you provide knownOut = {0, 2, 15}, GNC will fix the weights of nfg[0], nfg[2], and nfg[15] to zero
* and never update them, i.e., these measurements are treated as outliers and effectively discarded.
* */
void setKnownOutliers(const std::vector<size_t>& knownOut) {
for (size_t i = 0; i < knownOut.size(); i++){
knownOutliers.push_back(knownOut[i]);
}
std::sort(knownOutliers.begin(), knownOutliers.end());
}
/// Equals.
@@ -121,7 +135,8 @@ class GncParams {
return baseOptimizerParams.equals(other.baseOptimizerParams)
&& lossType == other.lossType && maxIterations == other.maxIterations
&& std::fabs(muStep - other.muStep) <= tol
&& verbosity == other.verbosity && knownInliers == other.knownInliers;
&& verbosity == other.verbosity && knownInliers == other.knownInliers
&& knownOutliers == other.knownOutliers;
}
/// Print.
@@ -144,6 +159,8 @@ class GncParams {
std::cout << "verbosity: " << verbosity << "\n";
for (size_t i = 0; i < knownInliers.size(); i++)
std::cout << "knownInliers: " << knownInliers[i] << "\n";
for (size_t i = 0; i < knownOutliers.size(); i++)
std::cout << "knownOutliers: " << knownOutliers[i] << "\n";
baseOptimizerParams.print(str);
}
};
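
The new constructor checks in GncOptimizer.h above reject inconsistent specifications built with these setters. A brief sketch of what a caller would observe; the function name and the fg/initial arguments are placeholders, not part of the commit:

#include <gtsam/nonlinear/GaussNewtonOptimizer.h>
#include <gtsam/nonlinear/GncOptimizer.h>
#include <iostream>
#include <stdexcept>

using namespace gtsam;

// Hypothetical example: listing slot 1 both as a known inlier and as a known outlier
// makes the GncOptimizer constructor throw (see the set_intersection check above).
void demoConflictingSpecification(const NonlinearFactorGraph& fg, const Values& initial) {
  GncParams<GaussNewtonParams> gncParams;
  gncParams.setKnownInliers({0, 1});
  gncParams.setKnownOutliers({1});
  try {
    GncOptimizer<GncParams<GaussNewtonParams>> gnc(fg, initial, gncParams);
  } catch (const std::runtime_error& e) {
    std::cout << e.what() << std::endl;  // generic message from the constructor check
  }
}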


@@ -749,6 +749,79 @@ TEST(GncOptimizer, optimizeSmallPoseGraph) {
CHECK(assert_equal(expected, actual, 1e-3)); // yay! we are robust to outliers!
}
/* ************************************************************************* */
TEST(GncOptimizer, knownInliersAndOutliers) {
auto fg = example::sharedNonRobustFactorGraphWithOutliers();
Point2 p0(1, 0);
Values initial;
initial.insert(X(1), p0);
std::vector<size_t> knownInliers;
knownInliers.push_back(0);
knownInliers.push_back(1);
knownInliers.push_back(2);
fg.print(" \n ");
// // nonconvexity with known inliers
// {
// GncParams<GaussNewtonParams> gncParams;
// gncParams.setKnownInliers(knownInliers);
// gncParams.setLossType(GncLossType::GM);
// //gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::SUMMARY);
// auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial,
// gncParams);
// gnc.setInlierCostThresholds(1.0);
// Values gnc_result = gnc.optimize();
// CHECK(assert_equal(Point2(0.0, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
//
// // check weights were actually fixed:
// Vector finalWeights = gnc.getWeights();
// DOUBLES_EQUAL(1.0, finalWeights[0], tol);
// DOUBLES_EQUAL(1.0, finalWeights[1], tol);
// DOUBLES_EQUAL(1.0, finalWeights[2], tol);
// }
// {
// GncParams<GaussNewtonParams> gncParams;
// gncParams.setKnownInliers(knownInliers);
// gncParams.setLossType(GncLossType::TLS);
// // gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::SUMMARY);
// auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial,
// gncParams);
//
// Values gnc_result = gnc.optimize();
// CHECK(assert_equal(Point2(0.0, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
//
// // check weights were actually fixed:
// Vector finalWeights = gnc.getWeights();
// DOUBLES_EQUAL(1.0, finalWeights[0], tol);
// DOUBLES_EQUAL(1.0, finalWeights[1], tol);
// DOUBLES_EQUAL(1.0, finalWeights[2], tol);
// DOUBLES_EQUAL(0.0, finalWeights[3], tol);
// }
// {
// // if we set the threshold large, they are all inliers
// GncParams<GaussNewtonParams> gncParams;
// gncParams.setKnownInliers(knownInliers);
// gncParams.setLossType(GncLossType::TLS);
// //gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::VALUES);
// auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial,
// gncParams);
// gnc.setInlierCostThresholds(100.0);
//
// Values gnc_result = gnc.optimize();
// CHECK(assert_equal(Point2(0.25, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
//
// // check weights were actually fixed:
// Vector finalWeights = gnc.getWeights();
// DOUBLES_EQUAL(1.0, finalWeights[0], tol);
// DOUBLES_EQUAL(1.0, finalWeights[1], tol);
// DOUBLES_EQUAL(1.0, finalWeights[2], tol);
// DOUBLES_EQUAL(1.0, finalWeights[3], tol);
// }
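// A further sketch (not from this commit): exercising setKnownOutliers directly,
// following the pattern of the blocks above. Only the pinned weights are asserted,
// since known inliers/outliers are never updated by GNC; with all four measurements
// specified, the optimizer also stops after the first inner optimization
// (nrUnknownInOrOut == 0). The converged estimate is deliberately not checked here.
// {
//   GncParams<GaussNewtonParams> gncParams;
//   gncParams.setKnownInliers(knownInliers);
//   std::vector<size_t> knownOutliers;
//   knownOutliers.push_back(3);
//   gncParams.setKnownOutliers(knownOutliers);
//   gncParams.setLossType(GncLossType::TLS);
//   auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial,
//       gncParams);
//   Values gnc_result = gnc.optimize();
//
//   // weights of known inliers/outliers are fixed by construction
//   Vector finalWeights = gnc.getWeights();
//   DOUBLES_EQUAL(1.0, finalWeights[0], tol);
//   DOUBLES_EQUAL(1.0, finalWeights[1], tol);
//   DOUBLES_EQUAL(1.0, finalWeights[2], tol);
//   DOUBLES_EQUAL(0.0, finalWeights[3], tol);
// }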
}
/* ************************************************************************* */
int main() {
TestResult tr;