Merge pull request #920 from borglab/fix/minor-stuff
commit f454bcbdf8

@@ -36,17 +36,17 @@ namespace gtsam {
     // no Ordering is provided. When removing optional from VariableIndex, create VariableIndex
     // before creating ordering.
     VariableIndex computedVariableIndex(asDerived());
-    return eliminateSequential(function, computedVariableIndex, orderingType);
+    return eliminateSequential(orderingType, function, computedVariableIndex);
   }
   else {
     // Compute an ordering and call this function again. We are guaranteed to have a
     // VariableIndex already here because we computed one if needed in the previous 'if' block.
     if (orderingType == Ordering::METIS) {
       Ordering computedOrdering = Ordering::Metis(asDerived());
-      return eliminateSequential(computedOrdering, function, variableIndex, orderingType);
+      return eliminateSequential(computedOrdering, function, variableIndex);
     } else {
       Ordering computedOrdering = Ordering::Colamd(*variableIndex);
-      return eliminateSequential(computedOrdering, function, variableIndex, orderingType);
+      return eliminateSequential(computedOrdering, function, variableIndex);
     }
   }
 }
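
For context, a minimal C++ usage sketch (not part of this PR) showing how the branches above are reached; the graph name `gfg` and the helper function are illustrative only, and METIS is only available when GTSAM is built with METIS support:

    #include <gtsam/inference/Ordering.h>
    #include <gtsam/linear/GaussianFactorGraph.h>

    void eliminationSketch(const gtsam::GaussianFactorGraph& gfg) {
      // No ordering given: the 'if' branch above first builds a VariableIndex,
      // then computes a COLAMD (default) or METIS ordering and recurses.
      auto bayesNet1 = gfg.eliminateSequential();                        // COLAMD by default
      auto bayesNet2 = gfg.eliminateSequential(gtsam::Ordering::METIS);  // METIS branch

      // Explicit ordering: the ordering-computation branches are skipped entirely.
      gtsam::Ordering ordering = gtsam::Ordering::Colamd(gfg);
      auto bayesNet3 = gfg.eliminateSequential(ordering);
    }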
@@ -142,8 +142,9 @@ class GTSAM_EXPORT GncOptimizer {
    * provides an extra interface for the user to initialize the weightst
    * */
   void setWeights(const Vector w) {
-    if(w.size() != nfg_.size()){
-      throw std::runtime_error("GncOptimizer::setWeights: the number of specified weights"
+    if (size_t(w.size()) != nfg_.size()) {
+      throw std::runtime_error(
+          "GncOptimizer::setWeights: the number of specified weights"
           " does not match the size of the factor graph.");
     }
     weights_ = w;
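
A hedged usage sketch (not from this PR) of the size check above; `graph`, `initial`, and `runGncSketch` are placeholders, and the factors are assumed to carry Gaussian noise models as GNC expects:

    #include <gtsam/nonlinear/GncOptimizer.h>

    using GncLMParams = gtsam::GncParams<gtsam::LevenbergMarquardtParams>;

    gtsam::Values runGncSketch(const gtsam::NonlinearFactorGraph& graph,
                               const gtsam::Values& initial) {
      gtsam::GncOptimizer<GncLMParams> gnc(graph, initial, GncLMParams());
      // One weight per factor; any size mismatch now throws std::runtime_error.
      gnc.setWeights(gtsam::Vector::Ones(graph.size()));
      return gnc.optimize();
    }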
@@ -35,8 +35,7 @@ namespace gtsam {
  * zero errors anyway. However, it means that below will only be exact for the correct measurement.
  */
 JacobianFactor linearizeNumerically(const NoiseModelFactor& factor,
-    const Values& values, double delta = 1e-5) {
-
+                                    const Values& values, double delta = 1e-5) {
   // We will fill a vector of key/Jacobians pairs (a map would sort)
   std::vector<std::pair<Key, Matrix> > jacobians;

@@ -46,24 +45,24 @@ JacobianFactor linearizeNumerically(const NoiseModelFactor& factor,

   // Loop over all variables
   const double one_over_2delta = 1.0 / (2.0 * delta);
-  for(Key key: factor) {
+  for (Key key : factor) {
     // Compute central differences using the values struct.
     VectorValues dX = values.zeroVectors();
     const size_t cols = dX.dim(key);
     Matrix J = Matrix::Zero(rows, cols);
     for (size_t col = 0; col < cols; ++col) {
       Eigen::VectorXd dx = Eigen::VectorXd::Zero(cols);
-      dx[col] = delta;
+      dx(col) = delta;
       dX[key] = dx;
       Values eval_values = values.retract(dX);
       const Eigen::VectorXd left = factor.whitenedError(eval_values);
-      dx[col] = -delta;
+      dx(col) = -delta;
       dX[key] = dx;
       eval_values = values.retract(dX);
       const Eigen::VectorXd right = factor.whitenedError(eval_values);
       J.col(col) = (left - right) * one_over_2delta;
     }
-    jacobians.push_back(std::make_pair(key,J));
+    jacobians.push_back(std::make_pair(key, J));
   }

   // Next step...return JacobianFactor
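
The loop above builds the Jacobian column by column with central differences: column i is approximately (f(x + delta*e_i) - f(x - delta*e_i)) / (2*delta). A self-contained sketch of the same scheme on a plain Eigen function (illustrative only, not GTSAM API):

    #include <Eigen/Dense>
    #include <functional>

    // Numerically differentiate f at x using central differences.
    Eigen::MatrixXd numericalJacobian(
        const std::function<Eigen::VectorXd(const Eigen::VectorXd&)>& f,
        const Eigen::VectorXd& x, double delta = 1e-5) {
      const Eigen::VectorXd fx = f(x);  // only used to size the result
      Eigen::MatrixXd J(fx.size(), x.size());
      const double one_over_2delta = 1.0 / (2.0 * delta);
      for (Eigen::Index col = 0; col < x.size(); ++col) {
        Eigen::VectorXd dx = Eigen::VectorXd::Zero(x.size());
        dx(col) = delta;
        J.col(col) = (f(x + dx) - f(x - dx)) * one_over_2delta;  // central difference
      }
      return J;
    }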
@@ -115,6 +115,10 @@ class Ordering {
   Ordering();
   Ordering(const gtsam::Ordering& other);

+  template <FACTOR_GRAPH = {gtsam::NonlinearFactorGraph,
+                            gtsam::GaussianFactorGraph}>
+  static gtsam::Ordering Colamd(const FACTOR_GRAPH& graph);
+
   // Testable
   void print(string s = "", const gtsam::KeyFormatter& keyFormatter =
                  gtsam::DefaultKeyFormatter) const;
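
For reference, the two instantiations this wrapper template exposes, as they are already callable from C++ (a sketch; `nfg` and `gfg` stand in for graphs built elsewhere):

    #include <gtsam/inference/Ordering.h>
    #include <gtsam/linear/GaussianFactorGraph.h>
    #include <gtsam/nonlinear/NonlinearFactorGraph.h>

    void orderingSketch(const gtsam::NonlinearFactorGraph& nfg,
                        const gtsam::GaussianFactorGraph& gfg) {
      gtsam::Ordering o1 = gtsam::Ordering::Colamd(nfg);  // FACTOR_GRAPH = NonlinearFactorGraph
      gtsam::Ordering o2 = gtsam::Ordering::Colamd(gfg);  // FACTOR_GRAPH = GaussianFactorGraph
      o1.print("COLAMD ordering for the nonlinear graph: ");
    }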
@@ -7,73 +7,79 @@
 See LICENSE for the license information

 Solve a structure-from-motion problem from a "Bundle Adjustment in the Large" file
-Author: Frank Dellaert (Python: Akshay Krishnan, John Lambert)
+Author: Frank Dellaert (Python: Akshay Krishnan, John Lambert, Varun Agrawal)
 """

 import argparse
 import logging
 import sys

-import matplotlib.pyplot as plt
-import numpy as np
-
 import gtsam
-from gtsam import (
-    GeneralSFMFactorCal3Bundler,
-    PinholeCameraCal3Bundler,
-    PriorFactorPinholeCameraCal3Bundler,
-    readBal,
-    symbol_shorthand
-)
+from gtsam import (GeneralSFMFactorCal3Bundler,
+                   PriorFactorPinholeCameraCal3Bundler, PriorFactorPoint3)
+from gtsam.symbol_shorthand import C, P  # type: ignore
+from gtsam.utils import plot  # type: ignore
+from matplotlib import pyplot as plt

-C = symbol_shorthand.C
-P = symbol_shorthand.P
+logging.basicConfig(stream=sys.stdout, level=logging.INFO)

+DEFAULT_BAL_DATASET = "dubrovnik-3-7-pre"
+

-logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
+def plot_scene(scene_data: gtsam.SfmData, result: gtsam.Values) -> None:
+    """Plot the SFM results."""
+    plot_vals = gtsam.Values()
+    for cam_idx in range(scene_data.number_cameras()):
+        plot_vals.insert(C(cam_idx),
+                         result.atPinholeCameraCal3Bundler(C(cam_idx)).pose())
+    for j in range(scene_data.number_tracks()):
+        plot_vals.insert(P(j), result.atPoint3(P(j)))

-def run(args):
+    plot.plot_3d_points(0, plot_vals, linespec="g.")
+    plot.plot_trajectory(0, plot_vals, title="SFM results")
+
+    plt.show()
+
+
+def run(args: argparse.Namespace) -> None:
     """ Run LM optimization with BAL input data and report resulting error """
-    input_file = gtsam.findExampleDataFile(args.input_file)
+    input_file = args.input_file

     # Load the SfM data from file
-    scene_data = readBal(input_file)
-    logging.info(f"read {scene_data.number_tracks()} tracks on {scene_data.number_cameras()} cameras\n")
+    scene_data = gtsam.readBal(input_file)
+    logging.info("read %d tracks on %d cameras\n", scene_data.number_tracks(),
+                 scene_data.number_cameras())

     # Create a factor graph
     graph = gtsam.NonlinearFactorGraph()

     # We share *one* noiseModel between all projection factors
-    noise = gtsam.noiseModel.Isotropic.Sigma(2, 1.0) # one pixel in u and v
+    noise = gtsam.noiseModel.Isotropic.Sigma(2, 1.0)  # one pixel in u and v

     # Add measurements to the factor graph
-    j = 0
-    for t_idx in range(scene_data.number_tracks()):
-        track = scene_data.track(t_idx) # SfmTrack
+    for j in range(scene_data.number_tracks()):
+        track = scene_data.track(j)  # SfmTrack
         # retrieve the SfmMeasurement objects
         for m_idx in range(track.number_measurements()):
             # i represents the camera index, and uv is the 2d measurement
             i, uv = track.measurement(m_idx)
             # note use of shorthand symbols C and P
             graph.add(GeneralSFMFactorCal3Bundler(uv, noise, C(i), P(j)))
-        j += 1

     # Add a prior on pose x1. This indirectly specifies where the origin is.
     graph.push_back(
-        gtsam.PriorFactorPinholeCameraCal3Bundler(
-            C(0), scene_data.camera(0), gtsam.noiseModel.Isotropic.Sigma(9, 0.1)
-        )
-    )
+        PriorFactorPinholeCameraCal3Bundler(
+            C(0), scene_data.camera(0),
+            gtsam.noiseModel.Isotropic.Sigma(9, 0.1)))
     # Also add a prior on the position of the first landmark to fix the scale
     graph.push_back(
-        gtsam.PriorFactorPoint3(
-            P(0), scene_data.track(0).point3(), gtsam.noiseModel.Isotropic.Sigma(3, 0.1)
-        )
-    )
+        PriorFactorPoint3(P(0),
+                          scene_data.track(0).point3(),
+                          gtsam.noiseModel.Isotropic.Sigma(3, 0.1)))

     # Create initial estimate
     initial = gtsam.Values()

     i = 0
     # add each PinholeCameraCal3Bundler
     for cam_idx in range(scene_data.number_cameras()):

@@ -81,12 +87,10 @@ def run(args):
         initial.insert(C(i), camera)
         i += 1

-    j = 0
     # add each SfmTrack
-    for t_idx in range(scene_data.number_tracks()):
-        track = scene_data.track(t_idx)
+    for j in range(scene_data.number_tracks()):
+        track = scene_data.track(j)
         initial.insert(P(j), track.point3())
-        j += 1

     # Optimize the graph and print results
     try:

@@ -94,25 +98,31 @@ def run(args):
         params.setVerbosityLM("ERROR")
         lm = gtsam.LevenbergMarquardtOptimizer(graph, initial, params)
         result = lm.optimize()
-    except Exception as e:
+    except RuntimeError:
         logging.exception("LM Optimization failed")
         return

     # Error drops from ~2764.22 to ~0.046
-    logging.info(f"final error: {graph.error(result)}")
+    logging.info("initial error: %f", graph.error(initial))
+    logging.info("final error: %f", graph.error(result))

+    plot_scene(scene_data, result)


+def main() -> None:
+    """Main runner."""
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-i',
+                        '--input_file',
+                        type=str,
+                        default=gtsam.findExampleDataFile(DEFAULT_BAL_DATASET),
+                        help="""Read SFM data from the specified BAL file.
+        The data format is described here: https://grail.cs.washington.edu/projects/bal/.
+        BAL files contain (nrPoses, nrPoints, nrObservations), followed by (i,j,u,v) tuples,
+        then (wx,wy,wz,tx,ty,tz,f,k1,k1) as Bundler camera calibrations w/ Rodrigues vector
+        and (x,y,z) 3d point initializations.""")
+    run(parser.parse_args())


 if __name__ == "__main__":
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        '-i',
-        '--input_file',
-        type=str,
-        default="dubrovnik-3-7-pre",
-        help='Read SFM data from the specified BAL file'
-        'The data format is described here: https://grail.cs.washington.edu/projects/bal/.'
-        'BAL files contain (nrPoses, nrPoints, nrObservations), followed by (i,j,u,v) tuples, '
-        'then (wx,wy,wz,tx,ty,tz,f,k1,k1) as Bundler camera calibrations w/ Rodrigues vector'
-        'and (x,y,z) 3d point initializations.'
-    )
-    run(parser.parse_args())
+    main()
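
For context, a hedged C++ sketch (not part of this PR) of the gauge-fixing step the Python example performs: priors on the first camera and the first landmark so the BAL problem has a unique optimum. The helper name and arguments are illustrative only:

    #include <gtsam/inference/Symbol.h>
    #include <gtsam/nonlinear/NonlinearFactorGraph.h>
    #include <gtsam/slam/dataset.h>

    void addGaugePriorsSketch(const gtsam::SfmData& sfmData,
                              gtsam::NonlinearFactorGraph* graph) {
      using gtsam::symbol_shorthand::C;
      using gtsam::symbol_shorthand::P;
      // 9 dof: 6 for the camera pose plus 3 for the Cal3Bundler calibration (f, k1, k2).
      graph->addPrior(C(0), sfmData.cameras[0],
                      gtsam::noiseModel::Isotropic::Sigma(9, 0.1));
      // 3 dof for the first 3D point; this fixes the overall scale.
      graph->addPrior(P(0), sfmData.tracks[0].point3(),
                      gtsam::noiseModel::Isotropic::Sigma(3, 0.1));
    }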