addressed final comments by Frank

release/4.3a0
lcarlone 2020-12-30 14:13:40 -05:00
parent 24672385b3
commit 248eec8e41
2 changed files with 19 additions and 25 deletions

View File

@ -57,39 +57,25 @@ class GncOptimizer {
if (graph[i]) {
NoiseModelFactor::shared_ptr factor = boost::dynamic_pointer_cast<
NoiseModelFactor>(graph[i]);
noiseModel::Robust::shared_ptr robust = boost::dynamic_pointer_cast<
auto robust = boost::dynamic_pointer_cast<
noiseModel::Robust>(factor->noiseModel());
if (robust) { // if the factor has a robust loss, we have to change it:
SharedNoiseModel gaussianNoise = robust->noise();
NoiseModelFactor::shared_ptr gaussianFactor = factor
->cloneWithNewNoiseModel(gaussianNoise);
nfg_[i] = gaussianFactor;
} else { // else we directly push it back
nfg_[i] = factor;
}
// if the factor has a robust loss, we remove the robust loss
nfg_[i] = robust ? factor-> cloneWithNewNoiseModel(robust->noise()) : factor;
}
}
}
/// Access the internal factor graph.
// Pre-change variant from this diff (removed): returns the graph by value,
// forcing a full copy of the factor graph on every access.
NonlinearFactorGraph getFactors() const {
return NonlinearFactorGraph(nfg_);
}
const NonlinearFactorGraph& getFactors() const {
  // Returns a const reference to the stored graph; no copy is made.
  return nfg_;
}
/// Access the internal values.
// Pre-change variant from this diff (removed): returns the current state
// estimate by value, copying the whole Values container.
Values getState() const {
return Values(state_);
}
const Values& getState() const {
  // Returns a const reference to the stored state; no copy is made.
  return state_;
}
/// Access the parameters.
// Pre-change variant from this diff (removed): returns the parameters by
// value, copying the GncParameters object on every access.
GncParameters getParams() const {
return GncParameters(params_);
}
const GncParameters& getParams() const {
  // Returns a const reference to the stored parameters; no copy is made.
  return params_;
}
/// Access the GNC weights.
// Pre-change variant from this diff (removed): returns the weight vector
// by value (a copy).
Vector getWeights() const {
return weights_;
}
const Vector& getWeights() const {
  // Returns a const reference to the stored weights; no copy is made.
  return weights_;
}
/// Compute optimal solution using graduated non-convexity.
Values optimize() {
@ -279,15 +265,14 @@ class GncOptimizer {
newGraph.resize(nfg_.size());
for (size_t i = 0; i < nfg_.size(); i++) {
if (nfg_[i]) {
NoiseModelFactor::shared_ptr factor = boost::dynamic_pointer_cast<
auto factor = boost::dynamic_pointer_cast<
NoiseModelFactor>(nfg_[i]);
noiseModel::Gaussian::shared_ptr noiseModel =
auto noiseModel =
boost::dynamic_pointer_cast<noiseModel::Gaussian>(
factor->noiseModel());
if (noiseModel) {
Matrix newInfo = weights[i] * noiseModel->information();
SharedNoiseModel newNoiseModel = noiseModel::Gaussian::Information(
newInfo);
auto newNoiseModel = noiseModel::Gaussian::Information(newInfo);
newGraph[i] = factor->cloneWithNewNoiseModel(newNoiseModel);
} else {
throw std::runtime_error(

View File

@ -77,6 +77,7 @@ class GncParams {
/// Choose the robust loss used by GNC.
void setLossType(const GncLossType type) { lossType = type; }
/// Set the maximum number of iterations in GNC (changing the maximum number of iterations may lead to less accurate solutions and is not recommended).
void setMaxIterations(const size_t maxIter) {
std::cout
@ -84,6 +85,7 @@ class GncParams {
<< std::endl;
maxIterations = maxIter;
}
/** Set the maximum weighted residual error for an inlier. For a factor in the form f(x) = 0.5 * || r(x) ||^2_Omega,
* the inlier threshold is the largest value of f(x) for the corresponding measurement to be considered an inlier.
* In other words, an inlier at x is such that 0.5 * || r(x) ||^2_Omega <= barcSq.
@ -93,22 +95,27 @@ class GncParams {
/// Set the inlier threshold barcSq (see class comment above for its meaning).
void setInlierCostThreshold(const double inth) { barcSq = inth; }
/// Set the graduated non-convexity step: at each GNC iteration, mu is updated as mu <- mu * muStep.
/// Set the multiplicative factor applied to mu at each GNC iteration.
void setMuStep(const double step) { muStep = step; }
/// Set the maximum relative cost change between consecutive iterations below which GNC stops iterating.
/// Set the relative cost tolerance used as a convergence criterion.
void setRelativeCostTol(double value) { relativeCostTol = value; }
/// Set the maximum difference between the weights and their rounding in {0,1} to stop iterating.
/// Set the tolerance on the distance of the weights from {0,1} used to stop iterating.
void setWeightsTol(double value) { weightsTol = value; }
/// Set the verbosity level.
/// Set the verbosity level of the optimizer.
void setVerbosityGNC(const Verbosity value) { verbosity = value; }
/** (Optional) Provide a vector of measurements that must be considered inliers. The entries in the vector
* correspond to the slots in the factor graph. For instance, if you have a nonlinear factor graph nfg,
* and you provide knownIn = {0, 2, 15}, GNC will not apply outlier rejection to nfg[0], nfg[2], and nfg[15].
@ -119,6 +126,7 @@ class GncParams {
for (size_t i = 0; i < knownIn.size(); i++)
knownInliers.push_back(knownIn[i]);
}
/// Equals.
bool equals(const GncParams& other, double tol = 1e-9) const {
return baseOptimizerParams.equals(other.baseOptimizerParams)
@ -127,6 +135,7 @@ class GncParams {
&& std::fabs(muStep - other.muStep) <= tol
&& verbosity == other.verbosity && knownInliers == other.knownInliers;
}
/// Print.
void print(const std::string& str) const {
std::cout << str << "\n";