Fixed error with IMU+Camera

release/4.3a0
djensen3 2014-05-15 15:57:22 -04:00
parent e5d4fe3aaf
commit 950e95b015
4 changed files with 42 additions and 31 deletions

View File

@@ -20,22 +20,22 @@ if ~exist('externallyConfigured', 'var')
 %% Configuration
 % General options
 options.useRealData = 1; % controls whether or not to use the real data (if available) as the ground truth traj
-options.includeBetweenFactors = 1; % if true, BetweenFactors will be added between consecutive poses
+options.includeBetweenFactors = 0; % if true, BetweenFactors will be added between consecutive poses
-options.includeIMUFactors = 0; % if true, IMU factors will be added between consecutive states (biases, poses, velocities)
+options.includeIMUFactors = 1; % if true, IMU factors will be added between consecutive states (biases, poses, velocities)
 options.imuFactorType = 1; % Set to 1 or 2 to use IMU type 1 or type 2 factors (will default to type 1)
 options.imuNonzeroBias = 0; % if true, a nonzero bias is applied to IMU measurements
 options.includeCameraFactors = 1; % if true, SmartProjectionPose3Factors will be used with randomly generated landmarks
 options.numberOfLandmarks = 1000; % Total number of visual landmarks (randomly generated in a box around the trajectory)
 options.includeGPSFactors = 0; % if true, GPS factors will be added as priors to poses
 options.gpsStartPose = 100; % Pose number to start including GPS factors at
-options.trajectoryLength = 20;%209; % length of the ground truth trajectory
+options.trajectoryLength = 100;%209; % length of the ground truth trajectory
 options.subsampleStep = 20; % number of poses to skip when using real data (to reduce computation on long trajectories)
-numMonteCarloRuns = 1; % number of Monte Carlo runs to perform
+numMonteCarloRuns = 2; % number of Monte Carlo runs to perform
 % Noise values to be adjusted
 sigma_ang = 1e-2; % std. deviation for rotational noise, typical 1e-2
@@ -93,7 +93,7 @@ metadata.camera.xlims = [-100, 650]; % x limits on area for landmark creation
 metadata.camera.ylims = [-100, 700]; % y limits on area for landmark creation
 metadata.camera.zlims = [-30, 30]; % z limits on area for landmark creation
 metadata.camera.visualRange = 100; % maximum distance from the camera that a landmark can be seen (meters)
-metadata.camera.bodyPoseCamera = Pose3.Expmap([-pi/2;0;0;0;0;0]); % pose of camera in body
+metadata.camera.bodyPoseCamera = Pose3; % pose of camera in body
 metadata.camera.CameraSigma = sigma_camera;
 cameraMeasurementNoise = noiseModel.Isotropic.Sigma(2, metadata.camera.CameraSigma);
 noiseVectorCamera = metadata.camera.CameraSigma .* ones(2,1);
@@ -103,7 +103,7 @@ if options.includeCameraFactors == 1
 for i = 1:options.numberOfLandmarks
 metadata.camera.gtLandmarkPoints(i) = Point3( ...
 [rand() * (metadata.camera.xlims(2)-metadata.camera.xlims(1)) + metadata.camera.xlims(1); ...
-rand() * (metadata.camera.ylims(2)-metadata.camera.ylims(1)) + metadata.camera.ylims(1);
+rand() * (metadata.camera.ylims(2)-metadata.camera.ylims(1)) + metadata.camera.ylims(1); ...
 rand() * (metadata.camera.zlims(2)-metadata.camera.zlims(1)) + metadata.camera.zlims(1)]);
 end
 end
@@ -147,26 +147,31 @@ metadata.imu.gyroConstantBiasVector = zeros(3,1);
 figure(1)
 hold on;
-b = [-1000 2000 -2000 2000 -30 30];
-for i = 1:size(metadata.camera.gtLandmarkPoints,2)
-p = metadata.camera.gtLandmarkPoints(i).vector;
-if(p(1) > b(1) && p(1) < b(2) && p(2) > b(3) && p(2) < b(4) && p(3) > b(5) && p(3) < b(6))
-plot3(p(1), p(2), p(3), 'k+');
-end
-end
-pointsToPlot = metadata.camera.gtLandmarkPoints(find(projectionFactorSeenBy > 0));
-for i = 1:length(pointsToPlot)
-p = pointsToPlot(i).vector;
-plot3(p(1), p(2), p(3), 'gs', 'MarkerSize', 10);
-end
+if options.includeCameraFactors
+  b = [-1000 2000 -2000 2000 -30 30];
+  for i = 1:size(metadata.camera.gtLandmarkPoints,2)
+    p = metadata.camera.gtLandmarkPoints(i).vector;
+    if(p(1) > b(1) && p(1) < b(2) && p(2) > b(3) && p(2) < b(4) && p(3) > b(5) && p(3) < b(6))
+      plot3(p(1), p(2), p(3), 'k+');
+    end
+  end
+  pointsToPlot = metadata.camera.gtLandmarkPoints(find(projectionFactorSeenBy > 0));
+  for i = 1:length(pointsToPlot)
+    p = pointsToPlot(i).vector;
+    plot3(p(1), p(2), p(3), 'gs', 'MarkerSize', 10);
+  end
+end
 plot3DPoints(gtValues);
 %plot3DTrajectory(gtValues, '-r', [], 1, Marginals(gtGraph, gtValues));
 plot3DTrajectory(gtValues, '-r');
 axis equal
 % optimize
 optimizer = GaussNewtonOptimizer(gtGraph, gtValues);
 gtEstimate = optimizer.optimize();
+plot3DTrajectory(gtEstimate, '-k');
 % estimate should match gtValues if graph is correct.
 fprintf('Error in ground truth graph at gtValues: %g \n', gtGraph.error(gtValues) );
 fprintf('Error in ground truth graph at gtEstimate: %g \n', gtGraph.error(gtEstimate) );
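Note on the bodyPoseCamera change above: the fixed camera-in-body rotation is replaced by the identity because the same rotation is now applied inside getPoseFromGtScenario (last file in this commit). As a point of reference, the minimal sketch below (hypothetical values, GTSAM MATLAB toolbox assumed on the path) shows what the two settings mean: Pose3.Expmap([-pi/2;0;0;0;0;0]) is a pure -90 degree rotation about the body X axis with zero translation, and composing it onto a body pose gives the world pose of the camera.

import gtsam.*;
bodyPoseCamera_identity = Pose3;                                % camera frame coincides with the body frame
bodyPoseCamera_rotated  = Pose3.Expmap([-pi/2; 0; 0; 0; 0; 0]); % -90 deg about body X, zero translation
bodyPose = Pose3(Rot3.Ypr(0.3, 0.0, 0.0), Point3(10, 5, 0));    % hypothetical body pose in the world frame
cameraPoseWorld = bodyPose.compose(bodyPoseCamera_rotated);     % world pose of the camera
disp(cameraPoseWorld.rotation.matrix);                          % camera axes expressed in the world frame

With the identity transform, composing currentPose with bodyPoseCamera is a no-op, which is why the third file in this commit can switch SimpleCamera from currentPose to cameraPose without changing the geometry here.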

View File

@@ -19,7 +19,7 @@ for i=0:length(measurements)
 if i==0
 %% first time step, add priors
 % Pose prior (poses used for all factors)
-noisyInitialPoseVector = Pose3.Logmap(currentPose) + measurementNoise.poseNoiseVector .* rand(6,1);
+noisyInitialPoseVector = Pose3.Logmap(currentPose) + measurementNoise.poseNoiseVector .* randn(6,1);
 initialPose = Pose3.Expmap(noisyInitialPoseVector);
 graph.add(PriorFactorPose3(currentPoseKey, initialPose, noiseModels.noisePose));
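The rand-to-randn change matters because rand(6,1) draws from a uniform distribution on [0,1), so the injected prior perturbation had a positive bias and the wrong spread; randn(6,1) draws zero-mean, unit-variance Gaussian samples, so scaling by the per-axis sigmas gives noise that matches the prior noise model. A minimal sketch with hypothetical sigma values:

sigma = [1e-2; 1e-2; 1e-2; 1e-1; 1e-1; 1e-1];   % hypothetical per-axis standard deviations
biasedNoise   = sigma .* rand(6,1);             % uniform on [0,1): mean sigma/2, never negative
unbiasedNoise = sigma .* randn(6,1);            % zero-mean Gaussian with std. deviation sigma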
@@ -35,9 +35,10 @@ for i=0:length(measurements)
 end
 %% Create a SmartProjectionFactor for each landmark
+projectionFactorSeenBy = [];
 if options.includeCameraFactors == 1
 for j=1:options.numberOfLandmarks
-SmartProjectionFactors(j) = SmartProjectionPose3Factor();
+SmartProjectionFactors(j) = SmartProjectionPose3Factor(0.01);
 % Use constructor with default values, but express the pose of the
 % camera as a 90 degree rotation about the X axis
 % SmartProjectionFactors(j) = SmartProjectionPose3Factor( ...
@@ -46,7 +47,6 @@ for i=0:length(measurements)
 % false, ... % manageDegeneracy
 % false, ... % enableEPI
 % metadata.camera.bodyPoseCamera); % Pose of camera in body frame
 end
 projectionFactorSeenBy = zeros(options.numberOfLandmarks,1);
 end
@@ -186,7 +186,7 @@ end % end of for over trajectory
 %% Add Camera Factors to the graph
 % Only factors for landmarks that have been viewed at least once are added
 % to the graph
-[find(projectionFactorSeenBy ~= 0) projectionFactorSeenBy(find(projectionFactorSeenBy ~= 0))]
+%[find(projectionFactorSeenBy ~= 0) projectionFactorSeenBy(find(projectionFactorSeenBy ~= 0))]
 if options.includeCameraFactors == 1
 for j = 1:options.numberOfLandmarks
 if projectionFactorSeenBy(j) > 0

View File

@@ -9,7 +9,8 @@ import gtsam.*;
 values = Values;
-warning('fake angles! TODO: use constructor from roll-pitch-yaw when using real data - currently using identity rotation')
+warning('Rotating Pose inside getPoseFromGtScenario! TODO: Use a body_P_sensor transform in the factors')
 if options.useRealData == 1
 %% Create a ground truth trajectory from Real data (if available)
@@ -29,8 +30,10 @@ if options.useRealData == 1
 %% Generate the current pose
 currentPoseKey = symbol('x', i);
 currentPose = imuSimulator.getPoseFromGtScenario(gtScenario,scenarioInd);
-%% FOR TESTING
-currentPose = currentPose.compose(metadata.camera.bodyPoseCamera);
+%% FOR TESTING - this is currently moved to getPoseFromGtScenario
+%currentPose = currentPose.compose(metadata.camera.bodyPoseCamera);
+%currentPose = currentPose.compose(Pose3.Expmap([-pi/2;0;0;0;0;0]));
 % add to values
 values.insert(currentPoseKey, currentPose);
@@ -57,7 +60,7 @@ if options.useRealData == 1
 scenarioIndIMU2 = previousScenarioInd+j;
 IMUPose1 = imuSimulator.getPoseFromGtScenario(gtScenario,scenarioIndIMU1);
 IMUPose2 = imuSimulator.getPoseFromGtScenario(gtScenario,scenarioIndIMU2);
-IMURot2 = IMUPose2.rotation.matrix;
+IMURot1 = IMUPose1.rotation.matrix;
 IMUdeltaPose = IMUPose1.between(IMUPose2);
 IMUdeltaPoseVector = Pose3.Logmap(IMUdeltaPose);
@@ -71,10 +74,10 @@ if options.useRealData == 1
 % deltaPij += deltaVij * deltaT + 0.5 * deltaRij.matrix() * biasHat.correctAccelerometer(measuredAcc) * deltaT*deltaT;
 % acc = (deltaPosition - initialVel * dT) * (2/dt^2)
-measurements(i).imu(j).accel = IMURot2' * (IMUdeltaPositionVector - currentVel.*deltaT).*(2/(deltaT*deltaT));
+measurements(i).imu(j).accel = IMURot1' * (IMUdeltaPositionVector - currentVel.*deltaT).*(2/(deltaT*deltaT));
 % Update velocity
-currentVel = currentVel + IMURot2 * measurements(i).imu(j).accel * deltaT;
+currentVel = currentVel + IMURot1 * measurements(i).imu(j).accel * deltaT;
 end
 end
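The switch from IMURot2 to IMURot1 makes the synthetic accelerometer reading consistent with how preintegration consumes it: the measurement must be expressed in the body frame at the start of the interval, and the same rotation must be used when propagating the velocity forward. Below is a self-contained sketch of the constant-acceleration model behind the "acc = (deltaPosition - initialVel * dT) * (2/dt^2)" comment, with hypothetical poses and the assumption that the commit's IMUdeltaPositionVector and currentVel are world-frame quantities:

import gtsam.*;
deltaT = 0.1;                                                  % assumed IMU sample period (s)
IMUPose1 = Pose3(Rot3.Ypr(0.20, 0.0, 0.0), Point3(0.0, 0.0, 0.0));
IMUPose2 = Pose3(Rot3.Ypr(0.25, 0.0, 0.0), Point3(0.5, 0.1, 0.0));
currentVel = [4; 1; 0];                                        % world-frame velocity at IMUPose1
IMURot1 = IMUPose1.rotation.matrix;                            % world-from-body rotation at the interval start
deltaP_world = IMUPose2.translation.vector - IMUPose1.translation.vector;
% body-frame acceleration that covers deltaP_world in deltaT, starting at currentVel:
accel_body = IMURot1' * (deltaP_world - currentVel * deltaT) * (2/(deltaT*deltaT));
% forward-integrating that acceleration reproduces the position change (up to round-off):
predictedDeltaP = currentVel * deltaT + 0.5 * (IMURot1 * accel_body) * deltaT^2;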
@@ -94,8 +97,8 @@ if options.useRealData == 1
 % Create the camera based on the current pose and the pose of the
 % camera in the body
 cameraPose = currentPose.compose(metadata.camera.bodyPoseCamera);
-%camera = SimpleCamera(cameraPose, metadata.camera.calibration);
-camera = SimpleCamera(currentPose, metadata.camera.calibration);
+camera = SimpleCamera(cameraPose, metadata.camera.calibration);
+%camera = SimpleCamera(currentPose, metadata.camera.calibration);
 % Record measurements if the landmark is within visual range
 for j=1:length(metadata.camera.gtLandmarkPoints)
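Building the SimpleCamera from cameraPose rather than currentPose is the substance of the IMU+camera fix: projections must be taken from the camera frame, i.e. the body pose composed with metadata.camera.bodyPoseCamera (identity after this commit, since the rotation now lives in getPoseFromGtScenario). A hypothetical, self-contained sketch of the visibility-and-projection step, with an assumed Cal3_S2 calibration and the 100 m visualRange from the first file:

import gtsam.*;
calibration = Cal3_S2(500, 500, 0, 320, 240);               % assumed pinhole calibration (fx, fy, s, u0, v0)
bodyPoseCamera = Pose3;                                     % camera-in-body transform (identity here)
currentPose = Pose3(Rot3.Ypr(0.1, 0.0, 0.0), Point3(10, 20, 0));
cameraPose = currentPose.compose(bodyPoseCamera);           % world pose of the camera, not of the body
camera = SimpleCamera(cameraPose, calibration);
landmark = Point3(30, 25, 2);
if cameraPose.range(landmark) < 100                         % visualRange check (meters)
    z = camera.project(landmark);                           % 2D pixel measurement (Point2)
end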

View File

@@ -30,7 +30,10 @@ gtPosition = Point3([xlt, ylt, zlt]');
 % pitch =
 % roll =
 % Coordinate frame is Y forward, X is right, Z is up
-gtBodyRotation = Rot3.Ypr(-gtScenario.Heading(scenarioInd), -gtScenario.Pitch(scenarioInd), -gtScenario.Roll(scenarioInd));
+gtBodyRotation = Rot3.Ypr(-gtScenario.Heading(scenarioInd), gtScenario.Pitch(scenarioInd), gtScenario.Roll(scenarioInd));
 currentPose = Pose3(gtBodyRotation, gtPosition);
+%% Rotate the pose
+currentPose = currentPose.compose(Pose3.Expmap([-pi/2;0;0;0;0;0]));
 end