Introduce RangeDataCollator (#975)
Synchronizes and merge-sorts TimedPointCloudData by per-point timestamps from multiple sensors. [RFC=0017](https://github.com/googlecartographer/rfcs/blob/master/text/0017-synchronize-points.md)

Branch: master
Parent: 976736051c
Commit: 1187480fe6
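An illustrative sketch (not part of this diff) of how a caller might drive the collator introduced in the files below; the sensor ids and the ExampleUsage() helper are hypothetical.

// Illustrative only, not part of the commit.
#include <algorithm>

#include "cartographer/common/time.h"
#include "cartographer/mapping/internal/range_data_collator.h"
#include "cartographer/sensor/timed_point_cloud_data.h"
#include "glog/logging.h"

namespace cartographer {
namespace mapping {

void ExampleUsage() {
  // Declare up front which range sensors are expected to report.
  RangeDataCollator collator({"horizontal_laser", "vertical_laser"});

  // Each message carries a reference time; the fourth component of every point
  // is its time in seconds relative to that reference (0 for the last point).
  sensor::TimedPointCloudData horizontal{
      common::FromUniversal(300), Eigen::Vector3f::Zero(),
      {Eigen::Vector4f(1.f, 0.f, 0.f, -1e-5f),
       Eigen::Vector4f(1.f, 0.f, 0.f, 0.f)}};
  sensor::TimedPointCloudData vertical{
      common::FromUniversal(310), Eigen::Vector3f::Zero(),
      {Eigen::Vector4f(0.f, 1.f, 0.f, -1e-5f),
       Eigen::Vector4f(0.f, 1.f, 0.f, 0.f)}};

  // Nothing is emitted until every expected sensor has contributed a message.
  const auto empty_result = collator.AddRangeData("horizontal_laser", horizontal);
  CHECK(empty_result.ranges.empty());

  // The second message completes the set: both buffers are cropped to the
  // common time interval and returned merge-sorted by per-point timestamp.
  const auto merged = collator.AddRangeData("vertical_laser", vertical);
  CHECK(std::is_sorted(
      merged.ranges.begin(), merged.ranges.end(),
      [](const sensor::TimedPointCloudOriginData::RangeMeasurement& a,
         const sensor::TimedPointCloudOriginData::RangeMeasurement& b) {
        return a.point_time[3] < b.point_time[3];
      }));
}

}  // namespace mapping
}  // namespace cartographer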
cartographer/mapping/internal/range_data_collator.cc
@@ -0,0 +1,119 @@
/*
 * Copyright 2018 The Cartographer Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "cartographer/mapping/internal/range_data_collator.h"

#include <memory>

#include "cartographer/common/make_unique.h"
#include "cartographer/mapping/local_slam_result_data.h"
#include "glog/logging.h"

namespace cartographer {
namespace mapping {

sensor::TimedPointCloudOriginData RangeDataCollator::AddRangeData(
    const std::string& sensor_id,
    const sensor::TimedPointCloudData& timed_point_cloud_data) {
  CHECK(expected_sensor_ids_.count(sensor_id) != 0);
  // TODO(gaschler): These two cases can probably be one.
  if (id_to_pending_data_.count(sensor_id) != 0) {
    current_start_ = current_end_;
    // When we have two messages of the same sensor, move forward the older of
    // the two (do not send out current).
    current_end_ = id_to_pending_data_.at(sensor_id).time;
    auto result = CropAndMerge();
    id_to_pending_data_.emplace(sensor_id, timed_point_cloud_data);
    return result;
  }
  id_to_pending_data_.emplace(sensor_id, timed_point_cloud_data);
  if (expected_sensor_ids_.size() != id_to_pending_data_.size()) {
    return {};
  }
  current_start_ = current_end_;
  // We have messages from all sensors, move forward to oldest.
  common::Time oldest_timestamp = common::Time::max();
  for (const auto& pair : id_to_pending_data_) {
    oldest_timestamp = std::min(oldest_timestamp, pair.second.time);
  }
  current_end_ = oldest_timestamp;
  return CropAndMerge();
}

sensor::TimedPointCloudOriginData RangeDataCollator::CropAndMerge() {
  sensor::TimedPointCloudOriginData result{current_end_, {}, {}};
  bool warned_for_dropped_points = false;
  for (auto it = id_to_pending_data_.begin();
       it != id_to_pending_data_.end();) {
    sensor::TimedPointCloudData& data = it->second;
    sensor::TimedPointCloud& ranges = it->second.ranges;

    auto overlap_begin = ranges.begin();
    while (overlap_begin < ranges.end() &&
           data.time + common::FromSeconds((*overlap_begin)[3]) <
               current_start_) {
      ++overlap_begin;
    }
    auto overlap_end = overlap_begin;
    while (overlap_end < ranges.end() &&
           data.time + common::FromSeconds((*overlap_end)[3]) <= current_end_) {
      ++overlap_end;
    }
    if (ranges.begin() < overlap_begin && !warned_for_dropped_points) {
      LOG(WARNING) << "Dropped " << std::distance(ranges.begin(), overlap_begin)
                   << " earlier points.";
      warned_for_dropped_points = true;
    }

    // Copy overlapping range.
    if (overlap_begin < overlap_end) {
      std::size_t origin_index = result.origins.size();
      result.origins.push_back(data.origin);
      double time_correction = common::ToSeconds(data.time - current_end_);
      for (auto overlap_it = overlap_begin; overlap_it != overlap_end;
           ++overlap_it) {
        sensor::TimedPointCloudOriginData::RangeMeasurement point{*overlap_it,
                                                                  origin_index};
        // current_end_ + point_time[3]_after == in_timestamp +
        // point_time[3]_before
        point.point_time[3] += time_correction;
        result.ranges.push_back(point);
      }
    }

    // Drop buffered points until overlap_end.
    if (overlap_end == ranges.begin()) {
      ++it;
    } else if (overlap_end == ranges.end()) {
      it = id_to_pending_data_.erase(it);
    } else {
      data = sensor::TimedPointCloudData{
          data.time, data.origin,
          sensor::TimedPointCloud(overlap_end, ranges.end())};
      ++it;
    }
  }

  std::sort(result.ranges.begin(), result.ranges.end(),
            [](const sensor::TimedPointCloudOriginData::RangeMeasurement& a,
               const sensor::TimedPointCloudOriginData::RangeMeasurement& b) {
              return a.point_time[3] < b.point_time[3];
            });
  return result;
}

}  // namespace mapping
}  // namespace cartographer
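An illustrative check (not part of this diff) of the time re-referencing done in CropAndMerge() above: incoming points carry times relative to their message timestamp and leave relative to current_end_, and adding time_correction preserves each point's absolute timestamp up to tick rounding. The CheckRebasingInvariant() helper is hypothetical.

// Illustrative only, not part of the commit.
#include <cstdint>

#include "cartographer/common/time.h"
#include "glog/logging.h"

namespace cartographer {
namespace mapping {

void CheckRebasingInvariant() {
  const common::Time in_timestamp = common::FromUniversal(310);  // message time
  const common::Time current_end = common::FromUniversal(300);   // output time
  const double point_time_before = -2e-6;  // seconds, relative to in_timestamp

  // Same correction as computed in CropAndMerge().
  const double time_correction = common::ToSeconds(in_timestamp - current_end);
  const double point_time_after = point_time_before + time_correction;

  // current_end + point_time_after == in_timestamp + point_time_before,
  // up to the 100 ns resolution of universal time ticks.
  const int64_t lhs = common::ToUniversal(
      current_end + common::FromSeconds(point_time_after));
  const int64_t rhs = common::ToUniversal(
      in_timestamp + common::FromSeconds(point_time_before));
  CHECK_NEAR(lhs, rhs, 2);
}

}  // namespace mapping
}  // namespace cartographer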
cartographer/mapping/internal/range_data_collator.h
@@ -0,0 +1,57 @@
/*
 * Copyright 2018 The Cartographer Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef CARTOGRAPHER_MAPPING_INTERNAL_RANGE_DATA_COLLATOR_H_
#define CARTOGRAPHER_MAPPING_INTERNAL_RANGE_DATA_COLLATOR_H_

#include <memory>

#include "cartographer/common/make_unique.h"
#include "cartographer/sensor/timed_point_cloud_data.h"

namespace cartographer {
namespace mapping {

// Synchronizes TimedPointCloudData from different sensors. Input needs only be
// monotonous in 'TimedPointCloudData::time', output is monotonous in per-point
// timing. Up to one message per sensor is buffered, so a delay of the period of
// the slowest sensor may be introduced, which can be alleviated by passing
// subdivisions.
class RangeDataCollator {
 public:
  explicit RangeDataCollator(
      const std::vector<std::string>& expected_range_sensor_ids)
      : expected_sensor_ids_(expected_range_sensor_ids.begin(),
                             expected_range_sensor_ids.end()) {}

  sensor::TimedPointCloudOriginData AddRangeData(
      const std::string& sensor_id,
      const sensor::TimedPointCloudData& timed_point_cloud_data);

 private:
  sensor::TimedPointCloudOriginData CropAndMerge();

  const std::set<std::string> expected_sensor_ids_;
  // Store at most one message for each sensor.
  std::map<std::string, sensor::TimedPointCloudData> id_to_pending_data_;
  common::Time current_start_ = common::Time::min();
  common::Time current_end_ = common::Time::min();
};

}  // namespace mapping
}  // namespace cartographer

#endif  // CARTOGRAPHER_MAPPING_INTERNAL_RANGE_DATA_COLLATOR_H_
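The class comment above notes that buffering one message per sensor can introduce up to one slow-sensor period of latency, which callers can reduce by splitting scans into subdivisions before feeding them in. A minimal sketch of such a split (not part of this diff; SplitIntoSubdivisions() is a hypothetical helper):

// Illustrative only, not part of the commit.
#include <cstddef>
#include <vector>

#include "cartographer/common/time.h"
#include "cartographer/sensor/timed_point_cloud_data.h"

namespace cartographer {
namespace mapping {

// Splits 'data' at 'split_index' (0 < split_index < data.ranges.size()) into
// two messages whose per-point times stay relative to each message's own
// reference time, as the collator expects.
std::vector<sensor::TimedPointCloudData> SplitIntoSubdivisions(
    const sensor::TimedPointCloudData& data, std::size_t split_index) {
  sensor::TimedPointCloud first(data.ranges.begin(),
                                data.ranges.begin() + split_index);
  sensor::TimedPointCloud second(data.ranges.begin() + split_index,
                                 data.ranges.end());
  // The first subdivision ends at its last point; shift its relative times so
  // that this point again has relative time 0 and absolute times are kept.
  const double first_duration = first.back()[3];
  const common::Time first_time =
      data.time + common::FromSeconds(first_duration);
  for (Eigen::Vector4f& point : first) {
    point[3] -= static_cast<float>(first_duration);
  }
  return {{first_time, data.origin, first}, {data.time, data.origin, second}};
}

}  // namespace mapping
}  // namespace cartographer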
cartographer/mapping/internal/range_data_collator_test.cc
@@ -0,0 +1,143 @@
/*
 * Copyright 2018 The Cartographer Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "cartographer/mapping/internal/range_data_collator.h"

#include "cartographer/common/time.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"

namespace cartographer {
namespace mapping {
namespace {

const int kNumSamples = 10;

sensor::TimedPointCloudData CreateFakeRangeData(int from, int to) {
  double duration = common::ToSeconds(common::FromUniversal(to) -
                                      common::FromUniversal(from));
  sensor::TimedPointCloudData result{common::FromUniversal(to),
                                     Eigen::Vector3f(0., 1., 2.),
                                     sensor::TimedPointCloud(kNumSamples)};
  for (int i = 0; i < kNumSamples; ++i) {
    double fraction = static_cast<double>(i) / (kNumSamples - 1);
    double relative_time = (1.f - fraction) * -duration;
    result.ranges[i] = Eigen::Vector4f(1., 2., 3., relative_time);
  }
  return result;
}

bool ArePointTimestampsSorted(const sensor::TimedPointCloudOriginData& data) {
  std::vector<float> timestamps;
  timestamps.reserve(data.ranges.size());
  for (const auto& range : data.ranges) {
    timestamps.push_back(range.point_time[3]);
  }
  return std::is_sorted(timestamps.begin(), timestamps.end());
}

TEST(RangeDataCollatorTest, SingleSensor) {
  const std::string sensor_id = "single_sensor";
  RangeDataCollator collator({sensor_id});
  auto output_0 =
      collator.AddRangeData(sensor_id, CreateFakeRangeData(200, 300));
  EXPECT_EQ(common::ToUniversal(output_0.time), 300);
  EXPECT_EQ(output_0.origins.size(), 1);
  EXPECT_EQ(output_0.ranges.size(), kNumSamples);
  EXPECT_TRUE(ArePointTimestampsSorted(output_0));
  auto output_1 =
      collator.AddRangeData(sensor_id, CreateFakeRangeData(300, 500));
  EXPECT_EQ(common::ToUniversal(output_1.time), 500);
  EXPECT_EQ(output_1.origins.size(), 1);
  ASSERT_EQ(output_1.ranges.size(), kNumSamples);
  EXPECT_TRUE(ArePointTimestampsSorted(output_1));
  EXPECT_NEAR(common::ToUniversal(
                  output_1.time +
                  common::FromSeconds(output_1.ranges[0].point_time[3])),
              300, 2);
  auto output_2 =
      collator.AddRangeData(sensor_id, CreateFakeRangeData(-1000, 510));
  EXPECT_EQ(common::ToUniversal(output_2.time), 510);
  EXPECT_EQ(output_2.origins.size(), 1);
  EXPECT_EQ(output_2.ranges.size(), 1);
  EXPECT_EQ(output_2.ranges[0].point_time[3], 0.f);
  EXPECT_TRUE(ArePointTimestampsSorted(output_2));
}

TEST(RangeDataCollatorTest, TwoSensors) {
  const std::string sensor_0 = "sensor_0";
  const std::string sensor_1 = "sensor_1";
  RangeDataCollator collator({sensor_0, sensor_1});
  auto output_0 =
      collator.AddRangeData(sensor_0, CreateFakeRangeData(200, 300));
  EXPECT_EQ(output_0.ranges.size(), 0);
  auto output_1 =
      collator.AddRangeData(sensor_1, CreateFakeRangeData(-1000, 310));
  EXPECT_EQ(output_1.origins.size(), 2);
  EXPECT_EQ(common::ToUniversal(output_1.time), 300);
  ASSERT_EQ(output_1.ranges.size(), 2 * kNumSamples - 1);
  EXPECT_NEAR(common::ToUniversal(
                  output_1.time +
                  common::FromSeconds(output_1.ranges[0].point_time[3])),
              -1000, 2);
  EXPECT_EQ(output_1.ranges.back().point_time[3], 0.f);
  EXPECT_TRUE(ArePointTimestampsSorted(output_1));
  auto output_2 =
      collator.AddRangeData(sensor_0, CreateFakeRangeData(300, 500));
  EXPECT_EQ(output_2.origins.size(), 2);
  EXPECT_EQ(common::ToUniversal(output_2.time), 310);
  ASSERT_EQ(output_2.ranges.size(), 2);
  EXPECT_NEAR(common::ToUniversal(
                  output_2.time +
                  common::FromSeconds(output_2.ranges[0].point_time[3])),
              300, 2);
  EXPECT_EQ(output_2.ranges.back().point_time[3], 0.f);
  EXPECT_TRUE(ArePointTimestampsSorted(output_2));
  // Sending the same sensor will flush everything before.
  auto output_3 =
      collator.AddRangeData(sensor_0, CreateFakeRangeData(600, 700));
  EXPECT_EQ(common::ToUniversal(output_3.time), 500);
  EXPECT_EQ(
      output_1.ranges.size() + output_2.ranges.size() + output_3.ranges.size(),
      3 * kNumSamples);
  EXPECT_EQ(output_3.ranges.back().point_time[3], 0.f);
  EXPECT_TRUE(ArePointTimestampsSorted(output_3));
}

TEST(RangeDataCollatorTest, ThreeSensors) {
  const std::string sensor_0 = "sensor_0";
  const std::string sensor_1 = "sensor_1";
  const std::string sensor_2 = "sensor_2";
  RangeDataCollator collator({sensor_0, sensor_1, sensor_2});
  auto output_0 =
      collator.AddRangeData(sensor_0, CreateFakeRangeData(100, 200));
  EXPECT_EQ(output_0.ranges.size(), 0);
  auto output_1 =
      collator.AddRangeData(sensor_1, CreateFakeRangeData(199, 250));
  EXPECT_EQ(output_1.ranges.size(), 0);
  auto output_2 =
      collator.AddRangeData(sensor_2, CreateFakeRangeData(210, 300));
  EXPECT_EQ(output_2.ranges.size(), kNumSamples + 1);
  EXPECT_TRUE(ArePointTimestampsSorted(output_2));
  auto output_3 =
      collator.AddRangeData(sensor_2, CreateFakeRangeData(400, 500));
  EXPECT_EQ(output_2.ranges.size() + output_3.ranges.size(), 3 * kNumSamples);
  EXPECT_TRUE(ArePointTimestampsSorted(output_3));
}

}  // namespace
}  // namespace mapping
}  // namespace cartographer
cartographer/sensor/timed_point_cloud_data.h
@@ -30,6 +30,16 @@ struct TimedPointCloudData
  sensor::TimedPointCloud ranges;
};

struct TimedPointCloudOriginData {
  struct RangeMeasurement {
    Eigen::Vector4f point_time;
    size_t origin_index;
  };
  common::Time time;
  std::vector<Eigen::Vector3f> origins;
  std::vector<RangeMeasurement> ranges;
};

// Converts 'timed_point_cloud_data' to a proto::TimedPointCloudData.
proto::TimedPointCloudData ToProto(
    const TimedPointCloudData& timed_point_cloud_data);
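An illustrative sketch (not part of this diff) of how a consumer might resolve each merged point from the TimedPointCloudOriginData struct added above back to an absolute timestamp and its sensor origin; the PrintMergedPoints() helper is hypothetical.

// Illustrative only, not part of the commit.
#include "cartographer/common/time.h"
#include "cartographer/sensor/timed_point_cloud_data.h"
#include "glog/logging.h"

namespace cartographer {
namespace mapping {

void PrintMergedPoints(const sensor::TimedPointCloudOriginData& merged) {
  for (const auto& range : merged.ranges) {
    // The fourth component is the per-point offset relative to 'merged.time';
    // 'origin_index' refers into 'merged.origins'.
    const common::Time absolute_time =
        merged.time + common::FromSeconds(range.point_time[3]);
    const Eigen::Vector3f& origin = merged.origins.at(range.origin_index);
    LOG(INFO) << "Point at " << common::ToUniversal(absolute_time)
              << " from origin " << origin.transpose();
  }
}

}  // namespace mapping
}  // namespace cartographer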