Use EstimateGravityOrientation() in 2D SLAM. (#499)

Branch: master
Author: Wolfgang Hess, 2017-09-04 11:14:51 +02:00 (committed by GitHub)
Parent: e3b6f0afc5
Commit: 3948943b64
2 changed files with 45 additions and 57 deletions

File 1 of 2: the LocalTrajectoryBuilder implementation (.cc)

@@ -37,10 +37,10 @@ LocalTrajectoryBuilder::LocalTrajectoryBuilder(
 LocalTrajectoryBuilder::~LocalTrajectoryBuilder() {}
 
 sensor::RangeData LocalTrajectoryBuilder::TransformAndFilterRangeData(
-    const transform::Rigid3f& tracking_to_tracking_2d,
+    const transform::Rigid3f& gravity_alignment,
     const sensor::RangeData& range_data) const {
   const sensor::RangeData cropped = sensor::CropRangeData(
-      sensor::TransformRangeData(range_data, tracking_to_tracking_2d),
+      sensor::TransformRangeData(range_data, gravity_alignment),
       options_.min_z(), options_.max_z());
   return sensor::RangeData{
       cropped.origin,
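A note on what TransformAndFilterRangeData() is doing with the renamed gravity_alignment argument: the scan is rotated into a gravity-aligned frame before the min_z/max_z crop, so floor and ceiling returns are rejected consistently even when the sensor is tilted. A minimal, self-contained sketch of that idea (Eigen only; the returns, the roll angle and the cropping band are made up, and this is not the Cartographer sensor API):

    // Minimal sketch: align a tilted scan with gravity, then crop on z.
    #include <Eigen/Geometry>

    #include <iostream>
    #include <vector>

    int main() {
      // Hypothetical gravity alignment: undo a 10 degree roll of the sensor.
      constexpr double kPi = 3.14159265358979323846;
      const Eigen::Quaterniond gravity_alignment(
          Eigen::AngleAxisd(-10.0 * kPi / 180.0, Eigen::Vector3d::UnitX()));

      // Three made-up range returns, expressed in the (tilted) tracking frame.
      std::vector<Eigen::Vector3d> returns;
      returns.push_back(Eigen::Vector3d(2.0, 0.0, 0.0));   // wall hit straight ahead
      returns.push_back(Eigen::Vector3d(0.0, 3.0, 0.0));   // looks level in the sensor frame, but is not
      returns.push_back(Eigen::Vector3d(1.0, 0.0, -0.9));  // floor hit
      const double min_z = -0.5;
      const double max_z = 0.5;

      // Rotate into the gravity-aligned frame, then crop on z, mirroring
      // TransformRangeData() followed by CropRangeData().
      std::vector<Eigen::Vector3d> cropped;
      for (const Eigen::Vector3d& hit : returns) {
        const Eigen::Vector3d aligned = gravity_alignment * hit;
        if (aligned.z() >= min_z && aligned.z() <= max_z) {
          cropped.push_back(aligned);
        }
      }
      std::cout << cropped.size() << " of " << returns.size() << " returns kept\n";
      return 0;
    }

In this toy setup only the wall hit survives the crop; the seemingly level return and the floor hit both drop below min_z once the roll is undone.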
@@ -49,36 +49,28 @@ sensor::RangeData LocalTrajectoryBuilder::TransformAndFilterRangeData(
 }
 
 void LocalTrajectoryBuilder::ScanMatch(
-    common::Time time, const transform::Rigid3d& pose_prediction,
-    const transform::Rigid3d& tracking_to_tracking_2d,
-    const sensor::RangeData& range_data_in_tracking_2d,
-    transform::Rigid3d* pose_observation) {
+    const common::Time time, const transform::Rigid2d& pose_prediction,
+    const sensor::RangeData& gravity_aligned_range_data,
+    transform::Rigid2d* const pose_observation) {
   std::shared_ptr<const Submap> matching_submap =
       active_submaps_.submaps().front();
-  transform::Rigid2d pose_prediction_2d =
-      transform::Project2D(pose_prediction * tracking_to_tracking_2d.inverse());
   // The online correlative scan matcher will refine the initial estimate for
   // the Ceres scan matcher.
-  transform::Rigid2d initial_ceres_pose = pose_prediction_2d;
+  transform::Rigid2d initial_ceres_pose = pose_prediction;
   sensor::AdaptiveVoxelFilter adaptive_voxel_filter(
       options_.adaptive_voxel_filter_options());
-  const sensor::PointCloud filtered_point_cloud_in_tracking_2d =
-      adaptive_voxel_filter.Filter(range_data_in_tracking_2d.returns);
+  const sensor::PointCloud filtered_gravity_aligned_point_cloud =
+      adaptive_voxel_filter.Filter(gravity_aligned_range_data.returns);
   if (options_.use_online_correlative_scan_matching()) {
     real_time_correlative_scan_matcher_.Match(
-        pose_prediction_2d, filtered_point_cloud_in_tracking_2d,
+        pose_prediction, filtered_gravity_aligned_point_cloud,
         matching_submap->probability_grid(), &initial_ceres_pose);
   }
 
-  transform::Rigid2d tracking_2d_to_map;
   ceres::Solver::Summary summary;
-  ceres_scan_matcher_.Match(pose_prediction_2d, initial_ceres_pose,
-                            filtered_point_cloud_in_tracking_2d,
-                            matching_submap->probability_grid(),
-                            &tracking_2d_to_map, &summary);
-  *pose_observation =
-      transform::Embed3D(tracking_2d_to_map) * tracking_to_tracking_2d;
+  ceres_scan_matcher_.Match(
+      pose_prediction, initial_ceres_pose, filtered_gravity_aligned_point_cloud,
+      matching_submap->probability_grid(), pose_observation, &summary);
 }
 
 std::unique_ptr<LocalTrajectoryBuilder::InsertionResult>
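The comment retained above ("The online correlative scan matcher will refine the initial estimate for the Ceres scan matcher.") describes a two-stage strategy that this commit does not change: an exhaustive correlative search over nearby poses seeds a local refinement. A toy, self-contained sketch of that pattern (made-up one-dimensional score function; a finer search stands in for the Ceres optimizer, and neither Cartographer matcher is used here):

    // Toy sketch of coarse-then-fine matching over a single offset.
    #include <iostream>

    // Hypothetical alignment score of a candidate offset; peaks at 1.23.
    double Score(double offset) {
      const double error = offset - 1.23;
      return 1.0 / (1.0 + error * error);
    }

    int main() {
      // Stage 1: coarse, exhaustive search over a window (the role of the
      // real-time correlative scan matcher).
      double best_offset = 0.0;
      double best_score = Score(best_offset);
      for (double candidate = -5.0; candidate <= 5.0; candidate += 0.5) {
        if (Score(candidate) > best_score) {
          best_score = Score(candidate);
          best_offset = candidate;
        }
      }

      // Stage 2: local refinement around the coarse result (the role of the
      // Ceres scan matcher; here simply a finer search instead of an optimizer).
      const double coarse_offset = best_offset;
      for (double candidate = coarse_offset - 0.5;
           candidate <= coarse_offset + 0.5; candidate += 0.001) {
        if (Score(candidate) > best_score) {
          best_score = Score(candidate);
          best_offset = candidate;
        }
      }

      std::cout << "coarse: " << coarse_offset << "  refined: " << best_offset
                << "\n";  // Roughly 1.0 and 1.23.
      return 0;
    }

The real matchers search jointly over x, y and rotation against the submap's probability grid; the point here is only the coarse-then-fine division of labor.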
@@ -135,40 +127,37 @@ LocalTrajectoryBuilder::AddRangeData(const common::Time time,
 std::unique_ptr<LocalTrajectoryBuilder::InsertionResult>
 LocalTrajectoryBuilder::AddAccumulatedRangeData(
     const common::Time time, const sensor::RangeData& range_data) {
-  const transform::Rigid3d pose_prediction =
-      extrapolator_->ExtrapolatePose(time);
-
-  // Computes the rotation without yaw, as defined by GetYaw().
-  const transform::Rigid3d tracking_to_tracking_2d =
-      transform::Rigid3d::Rotation(
-          Eigen::Quaterniond(Eigen::AngleAxisd(
-              -transform::GetYaw(pose_prediction), Eigen::Vector3d::UnitZ())) *
-          pose_prediction.rotation());
-
-  const sensor::RangeData range_data_in_tracking_2d =
-      TransformAndFilterRangeData(tracking_to_tracking_2d.cast<float>(),
-                                  range_data);
-  if (range_data_in_tracking_2d.returns.empty()) {
+  // Transforms 'range_data' into a frame where gravity direction is
+  // approximately +z.
+  const transform::Rigid3d gravity_alignment = transform::Rigid3d::Rotation(
+      extrapolator_->EstimateGravityOrientation(time));
+  const sensor::RangeData gravity_aligned_range_data =
+      TransformAndFilterRangeData(gravity_alignment.cast<float>(), range_data);
+  if (gravity_aligned_range_data.returns.empty()) {
     LOG(WARNING) << "Dropped empty horizontal range data.";
     return nullptr;
   }
 
-  transform::Rigid3d pose_estimate;
-  ScanMatch(time, pose_prediction, tracking_to_tracking_2d,
-            range_data_in_tracking_2d, &pose_estimate);
+  // Computes a gravity aligned pose prediction.
+  const transform::Rigid3d non_gravity_aligned_pose_prediction =
+      extrapolator_->ExtrapolatePose(time);
+  const transform::Rigid2d pose_prediction = transform::Project2D(
+      non_gravity_aligned_pose_prediction * gravity_alignment.inverse());
+
+  transform::Rigid2d pose_estimate_2d;
+  ScanMatch(time, pose_prediction, gravity_aligned_range_data,
+            &pose_estimate_2d);
+  const transform::Rigid3d pose_estimate =
+      transform::Embed3D(pose_estimate_2d) * gravity_alignment;
   extrapolator_->AddPose(time, pose_estimate);
 
-  const transform::Rigid3d tracking_2d_to_map =
-      pose_estimate * tracking_to_tracking_2d.inverse();
   last_pose_estimate_ = {
       time, pose_estimate,
-      sensor::TransformPointCloud(range_data_in_tracking_2d.returns,
-                                  tracking_2d_to_map.cast<float>())};
+      sensor::TransformPointCloud(
+          gravity_aligned_range_data.returns,
+          transform::Embed3D(pose_estimate_2d.cast<float>()))};
 
-  const transform::Rigid2d pose_estimate_2d =
-      transform::Project2D(tracking_2d_to_map);
-  if (motion_filter_.IsSimilar(time, transform::Embed3D(pose_estimate_2d))) {
+  if (motion_filter_.IsSimilar(time, pose_estimate)) {
     return nullptr;
   }
@@ -179,23 +168,23 @@ LocalTrajectoryBuilder::AddAccumulatedRangeData(
     insertion_submaps.push_back(submap);
   }
   active_submaps_.InsertRangeData(
-      TransformRangeData(range_data_in_tracking_2d,
+      TransformRangeData(gravity_aligned_range_data,
                          transform::Embed3D(pose_estimate_2d.cast<float>())));
 
   sensor::AdaptiveVoxelFilter adaptive_voxel_filter(
       options_.loop_closure_adaptive_voxel_filter_options());
-  const sensor::PointCloud filtered_point_cloud_in_tracking_2d =
-      adaptive_voxel_filter.Filter(range_data_in_tracking_2d.returns);
+  const sensor::PointCloud filtered_gravity_aligned_point_cloud =
+      adaptive_voxel_filter.Filter(gravity_aligned_range_data.returns);
 
   return common::make_unique<InsertionResult>(InsertionResult{
       std::make_shared<const mapping::TrajectoryNode::Data>(
           mapping::TrajectoryNode::Data{
               time,
               {},  // 'range_data' is only used in 3D.
-              filtered_point_cloud_in_tracking_2d,
+              filtered_gravity_aligned_point_cloud,
              {},  // 'high_resolution_point_cloud' is only used in 3D.
              {},  // 'low_resolution_point_cloud' is only used in 3D.
-              tracking_to_tracking_2d}),
+              gravity_alignment}),
       pose_estimate_2d, std::move(insertion_submaps)});
 }
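The heart of the change is the frame bookkeeping in AddAccumulatedRangeData(): scan matching now happens entirely in a gravity-aligned 2D frame, and the full 3D pose is recovered afterwards as transform::Embed3D(pose_estimate_2d) * gravity_alignment. A minimal sketch of that composition (Eigen only; Project2D and Embed3D below are simplified stand-ins for the Cartographer transform helpers, and all poses are made up):

    // Minimal sketch: factor out the gravity alignment, match in 2D, re-attach it.
    #include <Eigen/Geometry>

    #include <cmath>
    #include <iostream>

    // Keep only x, y and yaw of a 3D pose (the role of transform::Project2D).
    Eigen::Isometry2d Project2D(const Eigen::Isometry3d& pose) {
      const double yaw = std::atan2(pose.rotation()(1, 0), pose.rotation()(0, 0));
      Eigen::Isometry2d result = Eigen::Isometry2d::Identity();
      result.translate(
          Eigen::Vector2d(pose.translation().x(), pose.translation().y()));
      result.rotate(Eigen::Rotation2Dd(yaw));
      return result;
    }

    // Lift a 2D pose back into 3D with z = 0 (the role of transform::Embed3D).
    Eigen::Isometry3d Embed3D(const Eigen::Isometry2d& pose) {
      const double yaw = std::atan2(pose.rotation()(1, 0), pose.rotation()(0, 0));
      Eigen::Isometry3d result = Eigen::Isometry3d::Identity();
      result.translate(
          Eigen::Vector3d(pose.translation().x(), pose.translation().y(), 0.0));
      result.rotate(Eigen::AngleAxisd(yaw, Eigen::Vector3d::UnitZ()));
      return result;
    }

    int main() {
      // Made-up gravity orientation (small roll and pitch), playing the role of
      // extrapolator_->EstimateGravityOrientation(time).
      const Eigen::Isometry3d gravity_alignment(
          Eigen::Quaterniond(Eigen::AngleAxisd(0.05, Eigen::Vector3d::UnitX()) *
                             Eigen::AngleAxisd(-0.03, Eigen::Vector3d::UnitY())));

      // Made-up 3D pose prediction of the tracking frame in the map frame.
      const Eigen::Isometry3d non_gravity_aligned_pose_prediction =
          Eigen::Translation3d(1.0, 2.0, 0.0) *
          Eigen::AngleAxisd(0.4, Eigen::Vector3d::UnitZ()) * gravity_alignment;

      // Gravity-aligned 2D prediction handed to the scan matcher.
      const Eigen::Isometry2d pose_prediction = Project2D(
          non_gravity_aligned_pose_prediction * gravity_alignment.inverse());

      // Pretend the scan matcher returned the prediction unchanged; the caller
      // re-attaches the gravity alignment to get the full 3D pose estimate.
      const Eigen::Isometry3d pose_estimate =
          Embed3D(pose_prediction) * gravity_alignment;

      // The round trip reproduces the original 3D prediction (up to rounding).
      std::cout << "difference: "
                << (pose_estimate.matrix() -
                    non_gravity_aligned_pose_prediction.matrix()).norm()
                << "\n";
      return 0;
    }

With the 2D estimate left equal to the prediction, the round trip reproduces the 3D prediction up to rounding, which is why the gravity alignment can be factored out before matching and re-attached afterwards.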

File 2 of 2: the LocalTrajectoryBuilder header (.h)

@@ -65,15 +65,14 @@ class LocalTrajectoryBuilder {
   std::unique_ptr<InsertionResult> AddAccumulatedRangeData(
       common::Time time, const sensor::RangeData& range_data);
   sensor::RangeData TransformAndFilterRangeData(
-      const transform::Rigid3f& tracking_to_tracking_2d,
+      const transform::Rigid3f& gravity_alignment,
       const sensor::RangeData& range_data) const;
 
-  // Scan matches 'range_data_in_tracking_2d' and fill in the 'pose_observation'
-  // with the result.
-  void ScanMatch(common::Time time, const transform::Rigid3d& pose_prediction,
-                 const transform::Rigid3d& tracking_to_tracking_2d,
-                 const sensor::RangeData& range_data_in_tracking_2d,
-                 transform::Rigid3d* pose_observation);
+  // Scan matches 'gravity_aligned_range_data' and fill in the
+  // 'pose_observation' with the result.
+  void ScanMatch(common::Time time, const transform::Rigid2d& pose_prediction,
+                 const sensor::RangeData& gravity_aligned_range_data,
+                 transform::Rigid2d* pose_observation);
 
   // Lazily constructs a PoseExtrapolator.
   void InitializeExtrapolator(common::Time time);