Merge branch 'fix/matlab_examples_wrapper' into develop

release/4.3a0
lvzhaoyang 2014-12-19 11:27:20 -05:00
commit 3363edf77c
19 changed files with 179 additions and 61 deletions

View File

@@ -1730,6 +1730,7 @@ class Values {
   void insert(size_t j, const gtsam::Cal3DS2& t);
   void insert(size_t j, const gtsam::Cal3Bundler& t);
   void insert(size_t j, const gtsam::EssentialMatrix& t);
+  void insert(size_t j, const gtsam::SimpleCamera& t);
   void insert(size_t j, const gtsam::imuBias::ConstantBias& t);
   void insert(size_t j, Vector t);
   void insert(size_t j, Matrix t);
@@ -2149,7 +2150,7 @@ class NonlinearISAM {
 #include <gtsam/geometry/StereoPoint2.h>
 #include <gtsam/slam/PriorFactor.h>
-template<T = { gtsam::Point2, gtsam::StereoPoint2, gtsam::Point3, gtsam::Rot2, gtsam::Rot3, gtsam::Pose2, gtsam::Pose3, gtsam::Cal3_S2,gtsam::CalibratedCamera, gtsam::SimpleCamera, gtsam::imuBias::ConstantBias}>
+template<T = {Vector, gtsam::Point2, gtsam::StereoPoint2, gtsam::Point3, gtsam::Rot2, gtsam::Rot3, gtsam::Pose2, gtsam::Pose3, gtsam::Cal3_S2,gtsam::CalibratedCamera, gtsam::SimpleCamera, gtsam::imuBias::ConstantBias}>
 virtual class PriorFactor : gtsam::NoiseModelFactor {
   PriorFactor(size_t key, const T& prior, const gtsam::noiseModel::Base* noiseModel);
   T prior() const;
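
Note: together with the Vector entry added to the PriorFactor template and the new SimpleCamera insert overload above, plain MATLAB column vectors can now be stored in Values and given priors directly. A minimal sketch of the intended usage, mirroring the MATLAB test added later in this change set (assert stands in for the test utility EXPECT):

% Sketch: prior on a plain MATLAB column vector via the new Vector instantiation.
import gtsam.*;
values = Values;
key = 3;
priorVector = [0; 0; 0];              % no LieVector wrapper needed
model = noiseModel.Unit.Create(3);
factor = PriorFactorVector(key, priorVector, model);
values.insert(key, priorVector);
assert(factor.error(values) == 0);    % prior matches the inserted value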

View File

@@ -33,7 +33,7 @@ GPSskip = 10; % Skip this many GPS measurements each time
 %% Get initial conditions for the estimated trajectory
 currentPoseGlobal = Pose3(Rot3, GPS_data(firstGPSPose).Position); % initial pose is the reference frame (navigation frame)
-currentVelocityGlobal = LieVector([0;0;0]); % the vehicle is stationary at the beginning
+currentVelocityGlobal = [0;0;0]; % the vehicle is stationary at the beginning
 currentBias = imuBias.ConstantBias(zeros(3,1), zeros(3,1));
 sigma_init_x = noiseModel.Isotropic.Precisions([ 0.0; 0.0; 0.0; 1; 1; 1 ]);
 sigma_init_v = noiseModel.Isotropic.Sigma(3, 1000.0);
@@ -72,7 +72,7 @@ for measurementIndex = firstGPSPose:length(GPS_data)
     newValues.insert(currentVelKey, currentVelocityGlobal);
     newValues.insert(currentBiasKey, currentBias);
     newFactors.add(PriorFactorPose3(currentPoseKey, currentPoseGlobal, sigma_init_x));
-    newFactors.add(PriorFactorLieVector(currentVelKey, currentVelocityGlobal, sigma_init_v));
+    newFactors.add(PriorFactorVector(currentVelKey, currentVelocityGlobal, sigma_init_v));
     newFactors.add(PriorFactorConstantBias(currentBiasKey, currentBias, sigma_init_b));
   else
     t_previous = GPS_data(measurementIndex-1, 1).Time;

View File

@@ -40,7 +40,7 @@ end
 %% Create initial estimate
 initial = Values;
-trueE = EssentialMatrix(aRb,Sphere2(aTb));
+trueE = EssentialMatrix(aRb,Unit3(aTb));
 initialE = trueE.retract([0.1, -0.1, 0.1, 0.1, -0.1]');
 initial.insert(1, initialE);
@@ -52,5 +52,5 @@ result = optimizer.optimize();
 %% Print result (as essentialMatrix) and error
 error = graph.error(result)
-E = result.at(1)
+E = result.atEssentialMatrix(1)
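
Note: the same substitution recurs throughout the MATLAB hunks below: the generic result.at(key) accessor is replaced by typed accessors (atPose2, atPose3, atPoint2, atPoint3, atCal3_S2, atEssentialMatrix). A minimal sketch of the pattern, using only types that already appear in this change set:

% Sketch: retrieval from Values now names the expected type explicitly.
import gtsam.*;
values = Values;
values.insert(1, Pose3);
values.insert(2, Point3(1, 2, 3));
pose  = values.atPose3(1);    % previously values.at(1)
point = values.atPoint3(2);   % previously values.at(2)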

View File

@@ -0,0 +1,18 @@
+% test wrapping of Values
+import gtsam.*;
+values = Values;
+key = 5;
+priorPose3 = Pose3;
+model = noiseModel.Unit.Create(6);
+factor = PriorFactorPose3(key, priorPose3, model);
+values.insert(key, priorPose3);
+EXPECT('error', factor.error(values) == 0);
+key = 3;
+priorVector = [0,0,0]';
+model = noiseModel.Unit.Create(3);
+factor = PriorFactorVector(key, priorVector, model);
+values.insert(key, priorVector);
+EXPECT('error', factor.error(values) == 0);

View File

@@ -1,5 +1,8 @@
 % Test runner script - runs each test
+% display 'Starting: testPriorFactor'
+% testPriorFactor
 display 'Starting: testValues'
 testValues

View File

@@ -57,7 +57,7 @@ isamParams.setFactorization('QR');
 isam = ISAM2(isamParams);
 %% Get initial conditions for the estimated trajectory
-currentVelocityGlobal = LieVector([10;0;0]); % (This is slightly wrong!)
+currentVelocityGlobal = [10;0;0]; % (This is slightly wrong!) Zhaoyang: Fixed
 currentBias = imuBias.ConstantBias(zeros(3,1), zeros(3,1));
 sigma_init_v = noiseModel.Isotropic.Sigma(3, 1.0);
@@ -101,7 +101,7 @@ axis equal;
 for i=1:size(trajectory)-1
   xKey = symbol('x',i);
-  pose = trajectory.at(xKey); % GT pose
+  pose = trajectory.atPose3(xKey); % GT pose
   pose_t = pose.translation(); % GT pose-translation
   if exist('h_cursor','var')
@@ -165,13 +165,13 @@ for i=1:size(trajectory)-1
   if i > 1
     xKey_prev = symbol('x',i-1);
-    pose_prev = trajectory.at(xKey_prev);
+    pose_prev = trajectory.atPose3(xKey_prev);
     step = pose_prev.between(pose);
     % insert estimate for current pose with some normal noise on
     % translation
-    initial.insert(xKey,result.at(xKey_prev).compose(step.retract([0; 0; 0; normrnd(0,0.2,3,1)])));
+    initial.insert(xKey,result.atPose3(xKey_prev).compose(step.retract([0; 0; 0; normrnd(0,0.2,3,1)])));
   % visual measurements
   if measurements.size > 0 && use_camera
@@ -181,12 +181,12 @@ for i=1:size(trajectory)-1
     zKey = measurementKeys.at(zz);
     lKey = symbol('l',symbolIndex(zKey));
-    fg.add(TransformCalProjectionFactorCal3_S2(measurements.at(zKey), ...
+    fg.add(TransformCalProjectionFactorCal3_S2(measurements.atPoint2(zKey), ...
       z_cov, xKey, transformKey, lKey, calibrationKey, false, true));
     % only add landmark to values if doesn't exist yet
     if ~result.exists(lKey)
-      noisy_landmark = landmarks.at(lKey).compose(Point3(normrnd(0,landmark_noise,3,1)));
+      noisy_landmark = landmarks.atPoint3(lKey).compose(Point3(normrnd(0,landmark_noise,3,1)));
       initial.insert(lKey, noisy_landmark);
       % and add a prior since its position is known

View File

@@ -7,7 +7,7 @@ disp('Example of application of ISAM2 for visual-inertial navigation on the KITT
 %% Read metadata and compute relative sensor pose transforms
 % IMU metadata
 disp('-- Reading sensor metadata')
-IMU_metadata = importdata('KittiEquivBiasedImu_metadata.txt');
+IMU_metadata = importdata(findExampleDataFile('KittiEquivBiasedImu_metadata.txt'));
 IMU_metadata = cell2struct(num2cell(IMU_metadata.data), IMU_metadata.colheaders, 2);
 IMUinBody = Pose3.Expmap([IMU_metadata.BodyPtx; IMU_metadata.BodyPty; IMU_metadata.BodyPtz;
   IMU_metadata.BodyPrx; IMU_metadata.BodyPry; IMU_metadata.BodyPrz; ]);
@@ -16,14 +16,14 @@ if ~IMUinBody.equals(Pose3, 1e-5)
 end
 % VO metadata
-VO_metadata = importdata('KittiRelativePose_metadata.txt');
+VO_metadata = importdata(findExampleDataFile('KittiRelativePose_metadata.txt'));
 VO_metadata = cell2struct(num2cell(VO_metadata.data), VO_metadata.colheaders, 2);
 VOinBody = Pose3.Expmap([VO_metadata.BodyPtx; VO_metadata.BodyPty; VO_metadata.BodyPtz;
   VO_metadata.BodyPrx; VO_metadata.BodyPry; VO_metadata.BodyPrz; ]);
 VOinIMU = IMUinBody.inverse().compose(VOinBody);
 % GPS metadata
-GPS_metadata = importdata('KittiGps_metadata.txt');
+GPS_metadata = importdata(findExampleDataFile('KittiGps_metadata.txt'));
 GPS_metadata = cell2struct(num2cell(GPS_metadata.data), GPS_metadata.colheaders, 2);
 GPSinBody = Pose3.Expmap([GPS_metadata.BodyPtx; GPS_metadata.BodyPty; GPS_metadata.BodyPtz;
   GPS_metadata.BodyPrx; GPS_metadata.BodyPry; GPS_metadata.BodyPrz; ]);
@@ -32,7 +32,7 @@ GPSinIMU = IMUinBody.inverse().compose(GPSinBody);
 %% Read data and change coordinate frame of GPS and VO measurements to IMU frame
 disp('-- Reading sensor data from file')
 % IMU data
-IMU_data = importdata('KittiEquivBiasedImu.txt');
+IMU_data = importdata(findExampleDataFile('KittiEquivBiasedImu.txt'));
 IMU_data = cell2struct(num2cell(IMU_data.data), IMU_data.colheaders, 2);
 imum = cellfun(@(x) x', num2cell([ [IMU_data.accelX]' [IMU_data.accelY]' [IMU_data.accelZ]' [IMU_data.omegaX]' [IMU_data.omegaY]' [IMU_data.omegaZ]' ], 2), 'UniformOutput', false);
 [IMU_data.acc_omega] = deal(imum{:});
@@ -40,7 +40,7 @@ imum = cellfun(@(x) x', num2cell([ [IMU_data.accelX]' [IMU_data.accelY]' [IMU_da
 clear imum
 % VO data
-VO_data = importdata('KittiRelativePose.txt');
+VO_data = importdata(findExampleDataFile('KittiRelativePose.txt'));
 VO_data = cell2struct(num2cell(VO_data.data), VO_data.colheaders, 2);
 % Merge relative pose fields and convert to Pose3
 logposes = [ [VO_data.dtx]' [VO_data.dty]' [VO_data.dtz]' [VO_data.drx]' [VO_data.dry]' [VO_data.drz]' ];
@@ -71,7 +71,7 @@ clear logposes relposes
 %% Get initial conditions for the estimated trajectory
 % % % currentPoseGlobal = Pose3(Rot3, GPS_data(firstGPSPose).Position); % initial pose is the reference frame (navigation frame)
 currentPoseGlobal = Pose3;
-currentVelocityGlobal = LieVector([0;0;0]); % the vehicle is stationary at the beginning
+currentVelocityGlobal = [0;0;0]; % the vehicle is stationary at the beginning
 currentBias = imuBias.ConstantBias(zeros(3,1), zeros(3,1));
 sigma_init_x = noiseModel.Isotropic.Sigma(6, 0.01);
 sigma_init_v = noiseModel.Isotropic.Sigma(3, 1000.0);
@@ -120,7 +120,7 @@ for measurementIndex = 1:length(timestamps)
     newValues.insert(currentVelKey, currentVelocityGlobal);
     newValues.insert(currentBiasKey, currentBias);
     newFactors.add(PriorFactorPose3(currentPoseKey, currentPoseGlobal, sigma_init_x));
-    newFactors.add(PriorFactorLieVector(currentVelKey, currentVelocityGlobal, sigma_init_v));
+    newFactors.add(PriorFactorVector(currentVelKey, currentVelocityGlobal, sigma_init_v));
     newFactors.add(PriorFactorConstantBias(currentBiasKey, currentBias, sigma_init_b));
   else
     t_previous = timestamps(measurementIndex-1, 1);
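
Note: the other recurring change in this file is data loading: the Kitti text files are resolved with findExampleDataFile instead of a path relative to the current directory. A minimal sketch of the assumed usage, with the file name taken from the hunks above and the gtsam toolbox assumed to be on the MATLAB path:

% Sketch: locate an example data file via the toolbox helper, then load it.
import gtsam.*;
gpsMetadataFile = findExampleDataFile('KittiGps_metadata.txt');
GPS_metadata = importdata(gpsMetadataFile);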

View File

@@ -82,8 +82,8 @@ for ind_pose = 2:7
   key_prev = poseKey(ind_pose-1);
   key_curr = poseKey(ind_pose);
-  prev_pose = smartValues.at(key_prev);
-  curr_pose = smartValues.at(key_curr);
+  prev_pose = smartValues.atPose2(key_prev);
+  curr_pose = smartValues.atPose2(key_curr);
   GTDeltaPose = prev_pose.between(curr_pose);
   noisePose = Pose2([t_noise*rand; t_noise*rand; r_noise*rand]);
   NoisyDeltaPose = GTDeltaPose.compose(noisePose);

View File

@@ -96,17 +96,17 @@ for i=1:20
   if i > 1
     if i < 11
-      initial.insert(i,result.at(i-1).compose(move_forward));
+      initial.insert(i,result.atPose3(i-1).compose(move_forward));
       fg.add(BetweenFactorPose3(i-1,i, move_forward, covariance));
     else
-      initial.insert(i,result.at(i-1).compose(move_circle));
+      initial.insert(i,result.atPose3(i-1).compose(move_circle));
       fg.add(BetweenFactorPose3(i-1,i, move_circle, covariance));
     end
   end
   % generate some camera measurements
-  cam_pose = initial.at(i).compose(actual_transform);
+  cam_pose = initial.atPose3(i).compose(actual_transform);
   % gtsam.plotPose3(cam_pose);
   cam = SimpleCamera(cam_pose,K);
   i
@@ -137,10 +137,10 @@ for i=1:20
   hold on;
   %% plot results
-  result_camera_transform = result.at(transformKey);
+  result_camera_transform = result.atPose3(transformKey);
   for j=1:i
-    gtsam.plotPose3(result.at(j),[],0.5);
-    gtsam.plotPose3(result.at(j).compose(result_camera_transform),[],0.5);
+    gtsam.plotPose3(result.atPose3(j),[],0.5);
+    gtsam.plotPose3(result.atPose3(j).compose(result_camera_transform),[],0.5);
   end
   xlabel('x (m)');
@@ -153,12 +153,12 @@ for i=1:20
   % axis equal
   for l=101:100+nrPoints
-    plotPoint3(result.at(l),'g');
+    plotPoint3(result.atPoint3(l),'g');
   end
-  ty = result.at(transformKey).translation().y();
-  fx = result.at(calibrationKey).fx();
-  fy = result.at(calibrationKey).fy();
+  ty = result.atPose3(transformKey).translation().y();
+  fx = result.atCal3_S2(calibrationKey).fx();
+  fy = result.atCal3_S2(calibrationKey).fy();
   text(1,5,5,sprintf('Y-Transform(0.0): %0.2f',ty));
   text(1,5,3,sprintf('fx(900): %.0f',fx));
   text(1,5,1,sprintf('fy(900): %.0f',fy));
@@ -180,10 +180,10 @@ end
 fprintf('Cheirality Exception count: %d\n', cheirality_exception_count);
 disp('Transform after optimization');
-result.at(transformKey)
+result.atPose3(transformKey)
 disp('Calibration after optimization');
-result.at(calibrationKey)
+result.atCal3_S2(calibrationKey)

View File

@@ -89,7 +89,7 @@ isam = ISAM2(isamParams);
 currentIMUPoseGlobal = Pose3();
 %% Get initial conditions for the estimated trajectory
-currentVelocityGlobal = LieVector([1;0;0]); % the vehicle is stationary at the beginning
+currentVelocityGlobal = [1;0;0]; % the vehicle is stationary at the beginning
 currentBias = imuBias.ConstantBias(zeros(3,1), zeros(3,1));
 sigma_init_v = noiseModel.Isotropic.Sigma(3, 1.0);
@@ -127,7 +127,7 @@ for i=1:steps
   fg.add(PriorFactorCal3_S2(calibrationKey,K_corrupt,K_cov));
   % velocity and bias evolution
-  fg.add(PriorFactorLieVector(currentVelKey, currentVelocityGlobal, sigma_init_v));
+  fg.add(PriorFactorVector(currentVelKey, currentVelocityGlobal, sigma_init_v));
   fg.add(PriorFactorConstantBias(currentBiasKey, currentBias, sigma_init_b));
   result = initial;

View File

@@ -61,17 +61,17 @@ cheirality_exception_count = 0;
 for i=1:20
   if i > 1
     if i < 11
-      initial.insert(i,initial.at(i-1).compose(move_forward));
+      initial.insert(i,initial.atPose3(i-1).compose(move_forward));
       fg.add(BetweenFactorPose3(i-1,i, move_forward, covariance));
     else
-      initial.insert(i,initial.at(i-1).compose(move_circle));
+      initial.insert(i,initial.atPose3(i-1).compose(move_circle));
       fg.add(BetweenFactorPose3(i-1,i, move_circle, covariance));
     end
   end
   % generate some camera measurements
-  cam_pose = initial.at(i).compose(actual_transform);
+  cam_pose = initial.atPose3(i).compose(actual_transform);
   gtsam.plotPose3(cam_pose);
   cam = SimpleCamera(cam_pose,K);
   i
@@ -93,14 +93,14 @@ fprintf('Cheirality Exception count: %d\n', cheirality_exception_count);
 %% camera plotting
 for i=1:20
-  gtsam.plotPose3(initial.at(i).compose(camera_transform));
+  gtsam.plotPose3(initial.atPose3(i).compose(camera_transform));
 end
 xlabel('x (m)');
 ylabel('y (m)');
 disp('Transform before optimization');
-initial.at(1000)
+initial.atPose3(1000)
 params = LevenbergMarquardtParams;
 params.setAbsoluteErrorTol(1e-15);
@@ -112,7 +112,7 @@ optimizer = LevenbergMarquardtOptimizer(fg, initial, params);
 result = optimizer.optimizeSafely();
 disp('Transform after optimization');
-result.at(1000)
+result.atPose3(1000) % results is empty here. optimizer doesn't generate result?
 axis([0 25 0 25 0 10]);
 axis equal

View File

@@ -86,17 +86,17 @@ for i=1:20
   if i > 1
     if i < 11
-      initial.insert(i,result.at(i-1).compose(move_forward));
+      initial.insert(i,result.atPose3(i-1).compose(move_forward));
       fg.add(BetweenFactorPose3(i-1,i, move_forward, covariance));
     else
-      initial.insert(i,result.at(i-1).compose(move_circle));
+      initial.insert(i,result.atPose3(i-1).compose(move_circle));
       fg.add(BetweenFactorPose3(i-1,i, move_circle, covariance));
     end
   end
   % generate some camera measurements
-  cam_pose = initial.at(i).compose(actual_transform);
+  cam_pose = initial.atPose3(i).compose(actual_transform);
   % gtsam.plotPose3(cam_pose);
   cam = SimpleCamera(cam_pose,K);
   i
@@ -127,10 +127,10 @@ for i=1:20
   hold on;
   %% plot results
-  result_camera_transform = result.at(1000);
+  result_camera_transform = result.atPose3(1000);
   for j=1:i
-    gtsam.plotPose3(result.at(j));
-    gtsam.plotPose3(result.at(j).compose(result_camera_transform),[],0.5);
+    gtsam.plotPose3(result.atPose3(j));
+    gtsam.plotPose3(result.atPose3(j).compose(result_camera_transform),[],0.5);
   end
   xlabel('x (m)');
@@ -143,10 +143,10 @@ for i=1:20
   % axis equal
   for l=101:100+nrPoints
-    plotPoint3(result.at(l),'g');
+    plotPoint3(result.atPoint3(l),'g');
   end
-  ty = result.at(1000).translation().y();
+  ty = result.atPose3(1000).translation().y();
   text(5,5,5,sprintf('Y-Transform: %0.2g',ty));
   if(write_video)
@@ -168,7 +168,7 @@ fprintf('Cheirality Exception count: %d\n', cheirality_exception_count);
 disp('Transform after optimization');
-result.at(1000)
+result.atPose3(1000)

View File

@@ -23,7 +23,7 @@ z = zeros(1,nrMeasurements);
 for i = 0:measurement_keys.size-1
   key = measurement_keys.at(i);
   key_index = gtsam.symbolIndex(key);
-  p = landmarks.at(gtsam.symbol('l',key_index));
+  p = landmarks.atPoint3(gtsam.symbol('l',key_index));
   x(i+1) = p.x;
   y(i+1) = p.y;

View File

@@ -8,7 +8,7 @@ function [ measurements ] = project_landmarks( pose, landmarks, K )
 measurements = Values;
 for i=1:size(landmarks)-1
-  z = camera.project(landmarks.at(symbol('l',i)));
+  z = camera.project(landmarks.atPoint3(symbol('l',i)));
   % check bounding box
   if z.x < 0 || z.x > 1280

View File

@@ -48,12 +48,12 @@ optimizer = LevenbergMarquardtOptimizer(graph, initial, params);
 result = optimizer.optimize();
 % Check result
-CHECK('error',result.at(100).equals(b1,1e-5))
-CHECK('error',result.at(10).equals(origin,1e-5))
-CHECK('error',result.at(1).equals(Point2(0,1),1e-5))
-CHECK('error',result.at(2).equals(Point2(0,1),1e-5))
-CHECK('error',result.at(20).equals(origin,1e-5))
-CHECK('error',result.at(200).equals(b2,1e-5))
+CHECK('error',result.atPose2(100).equals(b1,1e-5))
+CHECK('error',result.atPose2(10).equals(origin,1e-5))
+CHECK('error',result.atPoint2(1).equals(Point2(0,1),1e-5))
+CHECK('error',result.atPoint2(2).equals(Point2(0,1),1e-5))
+CHECK('error',result.atPose2(20).equals(origin,1e-5))
+CHECK('error',result.atPose2(200).equals(b2,1e-5))
 % Check error
 CHECK('error',abs(graph.error(result))<1e-9)

View File

@@ -78,7 +78,7 @@ void Argument::matlab_unwrap(FileWriter& file, const string& matlabName) const {
   string cppType = type.qualifiedName("::");
   string matlabUniqueType = type.qualifiedName();
-  if (is_ptr)
+  if (is_ptr && type.category != Qualified::EIGEN)
     // A pointer: emit an "unwrap_shared_ptr" call which returns a pointer
     file.oss << "boost::shared_ptr<" << cppType << "> " << name
         << " = unwrap_shared_ptr< ";
-  else if (is_ref)
+  else if (is_ref && type.category != Qualified::EIGEN)
    // A reference: emit an "unwrap_shared_ptr" call and de-reference the pointer
     file.oss << cppType << "& " << name << " = *unwrap_shared_ptr< ";
   else
@@ -94,7 +94,7 @@ void Argument::matlab_unwrap(FileWriter& file, const string& matlabName) const {
     file.oss << cppType << " " << name << " = unwrap< ";
   file.oss << cppType << " >(" << matlabName;
-  if (is_ptr || is_ref)
+  if( (is_ptr || is_ref) && type.category != Qualified::EIGEN)
     file.oss << ", \"ptr_" << matlabUniqueType << "\"";
   file.oss << ");" << endl;
 }
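
Note: these EIGEN guards change how generated wrappers unwrap Eigen Vector/Matrix arguments passed by pointer or reference; the expected generated code further below switches accordingly from unwrap_shared_ptr< Matrix > to unwrap< Matrix >. At the MATLAB level, the intended effect (using the Values overloads from the first file in this change set) is that ordinary numeric arrays pass straight into such signatures. A minimal sketch of that assumed behavior:

% Sketch: plain MATLAB arrays handed to wrapped signatures taking Eigen types;
% no shared-pointer unwrap is attempted for Vector or Matrix arguments.
import gtsam.*;
values = Values;
values.insert(1, [0; 0; 0]);   % 3x1 double -> Vector overload
values.insert(2, eye(3));      % 3x3 double -> Matrix overload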

View File

@@ -425,3 +425,20 @@ boost::shared_ptr<Class> unwrap_shared_ptr(const mxArray* obj, const string& pro
   boost::shared_ptr<Class>* spp = *reinterpret_cast<boost::shared_ptr<Class>**> (mxGetData(mxh));
   return *spp;
 }
+//// throw an error if unwrap_shared_ptr is attempted for an Eigen Vector
+//template <>
+//Vector unwrap_shared_ptr<Vector>(const mxArray* obj, const string& propertyName) {
+//  bool unwrap_shared_ptr_Vector_attempted = false;
+//  BOOST_STATIC_ASSERT(unwrap_shared_ptr_Vector_attempted, "Vector cannot be unwrapped as a shared pointer");
+//  return Vector();
+//}
+//// throw an error if unwrap_shared_ptr is attempted for an Eigen Matrix
+//template <>
+//Matrix unwrap_shared_ptr<Matrix>(const mxArray* obj, const string& propertyName) {
+//  bool unwrap_shared_ptr_Matrix_attempted = false;
+//  BOOST_STATIC_ASSERT(unwrap_shared_ptr_Matrix_attempted, "Matrix cannot be unwrapped as a shared pointer");
+//  return Matrix();
+//}

View File

@@ -679,7 +679,7 @@ void MyTemplatePoint2_templatedMethod_55(int nargout, mxArray *out[], int nargin
   typedef boost::shared_ptr<MyTemplatePoint2> Shared;
   checkArguments("templatedMethodMatrix",nargout,nargin-1,1);
   Shared obj = unwrap_shared_ptr<MyTemplatePoint2>(in[0], "ptr_MyTemplatePoint2");
-  Matrix& t = *unwrap_shared_ptr< Matrix >(in[1], "ptr_Matrix");
+  Matrix& t = unwrap< Matrix >(in[1]);
   out[0] = wrap< Matrix >(obj->templatedMethod<Matrix>(t));
 }

View File

@@ -160,6 +160,85 @@ TEST( Class, Grammar ) {
   EXPECT_LONGS_EQUAL(ReturnType::EIGEN, rv4.type1.category);
 }
+
+//******************************************************************************
+TEST( Class, TemplateSubstition ) {
+  using classic::space_p;
+
+  // Create type grammar that will place result in cls
+  Class cls;
+  Template t;
+  ClassGrammar g(cls, t);
+
+  string markup(
+      string("template<T = {void, double, Matrix, Point3}> class Point2 { \n")
+    + string(" T myMethod(const T& t) const; \n")
+    + string("};"));
+
+  EXPECT(parse(markup.c_str(), g, space_p).full);
+
+  // Method 2
+  Method m2 = cls.method("myMethod");
+  EXPECT(assert_equal("myMethod", m2.name()));
+  EXPECT(m2.isConst());
+  LONGS_EQUAL(1, m2.nrOverloads());
+
+  ReturnValue rv2 = m2.returnValue(0);
+  EXPECT(!rv2.isPair);
+  EXPECT(!rv2.type1.isPtr);
+  EXPECT(assert_equal("T", rv2.type1.name()));
+  EXPECT_LONGS_EQUAL(ReturnType::CLASS, rv2.type1.category);
+
+  EXPECT_LONGS_EQUAL(4, t.nrValues());
+  EXPECT(t.argName()=="T");
+  EXPECT(t[0]==Qualified("void",Qualified::VOID));
+  EXPECT(t[1]==Qualified("double",Qualified::BASIS));
+  EXPECT(t[2]==Qualified("Matrix",Qualified::EIGEN));
+  EXPECT(t[3]==Qualified("Point3",Qualified::CLASS));
+
+  vector<Class> classes = cls.expandTemplate(t.argName(),
+      t.argValues());
+
+  // check the number of new classes is four
+  EXPECT_LONGS_EQUAL(4, classes.size());
+
+  // check return types
+  EXPECT(classes[0].method("myMethod").returnValue(0).type1 == Qualified("void",Qualified::VOID));
+  EXPECT(classes[1].method("myMethod").returnValue(0).type1 == Qualified("double",Qualified::BASIS));
+  EXPECT(classes[2].method("myMethod").returnValue(0).type1 == Qualified("Matrix",Qualified::EIGEN));
+  EXPECT(classes[3].method("myMethod").returnValue(0).type1 == Qualified("Point3",Qualified::CLASS));
+
+  // check the argument types
+  EXPECT(classes[0].method("myMethod").argumentList(0)[0].type == Qualified("void",Qualified::VOID));
+  EXPECT(classes[1].method("myMethod").argumentList(0)[0].type == Qualified("double",Qualified::BASIS));
+  EXPECT(classes[2].method("myMethod").argumentList(0)[0].type == Qualified("Matrix",Qualified::EIGEN));
+  EXPECT(classes[3].method("myMethod").argumentList(0)[0].type == Qualified("Point3",Qualified::CLASS));
+}
+
+TEST(Class, Template) {
+  using classic::space_p;
+
+  // Create type grammar that will place result in cls
+  Class cls;
+  Template t;
+  ClassGrammar g(cls, t);
+
+  string markup(
+      string("template<T = {Vector, Matrix}>"
+      " virtual class PriorFactor : gtsam::NoiseModelFactor {"
+      " PriorFactor(size_t key, const T& prior, const gtsam::noiseModel::Base* noiseModel); "
+      " T prior() const; "
+      " void serialize() const; "
+      "};" ));
+
+  EXPECT(parse(markup.c_str(), g, space_p).full);
+}
+
 /* ************************************************************************* */
 int main() {
   TestResult tr;