Rename the scan queue. (#505)

This queue contains not only scans, but also other sensor data, e.g. from an IMU.
Branch: master
Author: Wolfgang Hess, 2017-09-07 14:04:00 +02:00 (committed by GitHub)
Parent: 22f41d8e37
Commit: 333516d955
4 changed files with 34 additions and 34 deletions
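For context, the renamed members implement a deferred-work idiom that is visible in the hunks below: AddWorkItem() runs a work item immediately unless a queue exists, in which case the item is buffered and later replayed by HandleWorkQueue() once the loop-closure optimization has finished. The following is a minimal, standalone sketch of that pattern only; the class and method names (WorkQueueDemo, RunOrDefer, StartOptimization, DrainQueue) are illustrative and not part of the Cartographer API, and the real SparsePoseGraph additionally holds a mutex, uses constraint_builder_.WhenDone(), and re-runs the optimization when more work has queued up.

// Illustrative sketch of the deferred-work-queue idiom; not Cartographer code.
#include <deque>
#include <functional>
#include <iostream>
#include <memory>

class WorkQueueDemo {
 public:
  // Analogous to AddWorkItem(): run immediately unless a queue exists, in
  // which case the item is deferred until the queue is drained.
  void RunOrDefer(const std::function<void()>& work_item) {
    if (work_queue_ == nullptr) {
      work_item();
    } else {
      work_queue_->push_back(work_item);
    }
  }

  // Analogous to starting a loop-closure optimization: once the queue exists,
  // later work items are buffered instead of being executed right away.
  void StartOptimization() {
    if (work_queue_ == nullptr) {
      work_queue_ = std::make_unique<std::deque<std::function<void()>>>();
    }
  }

  // Analogous to HandleWorkQueue(): replay the buffered items in order and
  // drop the queue once it is empty.
  void DrainQueue() {
    if (work_queue_ == nullptr) return;
    while (!work_queue_->empty()) {
      work_queue_->front()();
      work_queue_->pop_front();
    }
    work_queue_.reset();
  }

 private:
  std::unique_ptr<std::deque<std::function<void()>>> work_queue_;
};

int main() {
  WorkQueueDemo demo;
  demo.RunOrDefer([] { std::cout << "runs immediately\n"; });
  demo.StartOptimization();
  demo.RunOrDefer([] { std::cout << "deferred until the queue is drained\n"; });
  demo.DrainQueue();
}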


@@ -48,7 +48,7 @@ SparsePoseGraph::SparsePoseGraph(
 SparsePoseGraph::~SparsePoseGraph() {
   WaitForAllComputations();
   common::MutexLocker locker(&mutex_);
-  CHECK(scan_queue_ == nullptr);
+  CHECK(work_queue_ == nullptr);
 }

 std::vector<mapping::SubmapId> SparsePoseGraph::GrowSubmapTransformsAsNeeded(
@@ -140,10 +140,10 @@ void SparsePoseGraph::AddScan(
 }

 void SparsePoseGraph::AddWorkItem(const std::function<void()>& work_item) {
-  if (scan_queue_ == nullptr) {
+  if (work_queue_ == nullptr) {
     work_item();
   } else {
-    scan_queue_->push_back(work_item);
+    work_queue_->push_back(work_item);
   }
 }
@@ -298,15 +298,15 @@ void SparsePoseGraph::ComputeConstraintsForScan(
       num_scans_since_last_loop_closure_ > options_.optimize_every_n_scans()) {
     CHECK(!run_loop_closure_);
     run_loop_closure_ = true;
-    // If there is a 'scan_queue_' already, some other thread will take care.
-    if (scan_queue_ == nullptr) {
-      scan_queue_ = common::make_unique<std::deque<std::function<void()>>>();
-      HandleScanQueue();
+    // If there is a 'work_queue_' already, some other thread will take care.
+    if (work_queue_ == nullptr) {
+      work_queue_ = common::make_unique<std::deque<std::function<void()>>>();
+      HandleWorkQueue();
     }
   }
 }

-void SparsePoseGraph::HandleScanQueue() {
+void SparsePoseGraph::HandleWorkQueue() {
   constraint_builder_.WhenDone(
       [this](const sparse_pose_graph::ConstraintBuilder::Result& result) {
         {
@@ -319,16 +319,16 @@ void SparsePoseGraph::HandleScanQueue() {
         num_scans_since_last_loop_closure_ = 0;
         run_loop_closure_ = false;
         while (!run_loop_closure_) {
-          if (scan_queue_->empty()) {
+          if (work_queue_->empty()) {
             LOG(INFO) << "We caught up. Hooray!";
-            scan_queue_.reset();
+            work_queue_.reset();
             return;
           }
-          scan_queue_->front()();
-          scan_queue_->pop_front();
+          work_queue_->front()();
+          work_queue_->pop_front();
         }
         // We have to optimize again.
-        HandleScanQueue();
+        HandleWorkQueue();
       });
 }


@@ -142,8 +142,8 @@ class SparsePoseGraph : public mapping::SparsePoseGraph {
       REQUIRES(mutex_);

   // Registers the callback to run the optimization once all constraints have
-  // been computed, that will also do all work that queue up in 'scan_queue_'.
-  void HandleScanQueue() REQUIRES(mutex_);
+  // been computed, that will also do all work that queue up in 'work_queue_'.
+  void HandleWorkQueue() REQUIRES(mutex_);

   // Waits until we caught up (i.e. nothing is waiting to be scheduled), and
   // all computations have finished.
@@ -166,9 +166,9 @@ class SparsePoseGraph : public mapping::SparsePoseGraph {
   const mapping::proto::SparsePoseGraphOptions options_;
   common::Mutex mutex_;

-  // If it exists, further scans must be added to this queue, and will be
+  // If it exists, further work items must be added to this queue, and will be
   // considered later.
-  std::unique_ptr<std::deque<std::function<void()>>> scan_queue_
+  std::unique_ptr<std::deque<std::function<void()>>> work_queue_
       GUARDED_BY(mutex_);

   // How our various trajectories are related.


@@ -49,7 +49,7 @@ SparsePoseGraph::SparsePoseGraph(
 SparsePoseGraph::~SparsePoseGraph() {
   WaitForAllComputations();
   common::MutexLocker locker(&mutex_);
-  CHECK(scan_queue_ == nullptr);
+  CHECK(work_queue_ == nullptr);
 }

 std::vector<mapping::SubmapId> SparsePoseGraph::GrowSubmapTransformsAsNeeded(
@@ -135,10 +135,10 @@ void SparsePoseGraph::AddScan(
 }

 void SparsePoseGraph::AddWorkItem(const std::function<void()>& work_item) {
-  if (scan_queue_ == nullptr) {
+  if (work_queue_ == nullptr) {
     work_item();
   } else {
-    scan_queue_->push_back(work_item);
+    work_queue_->push_back(work_item);
   }
 }
@@ -313,15 +313,15 @@ void SparsePoseGraph::ComputeConstraintsForScan(
       num_scans_since_last_loop_closure_ > options_.optimize_every_n_scans()) {
     CHECK(!run_loop_closure_);
     run_loop_closure_ = true;
-    // If there is a 'scan_queue_' already, some other thread will take care.
-    if (scan_queue_ == nullptr) {
-      scan_queue_ = common::make_unique<std::deque<std::function<void()>>>();
-      HandleScanQueue();
+    // If there is a 'work_queue_' already, some other thread will take care.
+    if (work_queue_ == nullptr) {
+      work_queue_ = common::make_unique<std::deque<std::function<void()>>>();
+      HandleWorkQueue();
     }
   }
 }

-void SparsePoseGraph::HandleScanQueue() {
+void SparsePoseGraph::HandleWorkQueue() {
   constraint_builder_.WhenDone(
       [this](const sparse_pose_graph::ConstraintBuilder::Result& result) {
         {
@@ -334,16 +334,16 @@ void SparsePoseGraph::HandleScanQueue() {
         num_scans_since_last_loop_closure_ = 0;
         run_loop_closure_ = false;
         while (!run_loop_closure_) {
-          if (scan_queue_->empty()) {
+          if (work_queue_->empty()) {
             LOG(INFO) << "We caught up. Hooray!";
-            scan_queue_.reset();
+            work_queue_.reset();
             return;
           }
-          scan_queue_->front()();
-          scan_queue_->pop_front();
+          work_queue_->front()();
+          work_queue_->pop_front();
         }
         // We have to optimize again.
-        HandleScanQueue();
+        HandleWorkQueue();
       });
 }


@@ -143,8 +143,8 @@ class SparsePoseGraph : public mapping::SparsePoseGraph {
       REQUIRES(mutex_);

   // Registers the callback to run the optimization once all constraints have
-  // been computed, that will also do all work that queue up in 'scan_queue_'.
-  void HandleScanQueue() REQUIRES(mutex_);
+  // been computed, that will also do all work that queue up in 'work_queue_'.
+  void HandleWorkQueue() REQUIRES(mutex_);

   // Waits until we caught up (i.e. nothing is waiting to be scheduled), and
   // all computations have finished.
@@ -171,9 +171,9 @@ class SparsePoseGraph : public mapping::SparsePoseGraph {
   const mapping::proto::SparsePoseGraphOptions options_;
   common::Mutex mutex_;

-  // If it exists, further scans must be added to this queue, and will be
+  // If it exists, further work items must be added to this queue, and will be
   // considered later.
-  std::unique_ptr<std::deque<std::function<void()>>> scan_queue_
+  std::unique_ptr<std::deque<std::function<void()>>> work_queue_
       GUARDED_BY(mutex_);

   // How our various trajectories are related.