Add ground truth stats to quality control pipeline. (#578)

master
Christoph Schütte 2017-11-07 20:30:54 +01:00 committed by GitHub
parent 1b8869bbf6
commit e63126f329
1 changed file with 73 additions and 15 deletions


@@ -58,7 +58,15 @@ SCHEMA = [
     bigquery.SchemaField('constraints_count', 'INTEGER'),
     bigquery.SchemaField('constraints_score_minimum', 'FLOAT'),
     bigquery.SchemaField('constraints_score_maximum', 'FLOAT'),
-    bigquery.SchemaField('constraints_score_mean', 'FLOAT')
+    bigquery.SchemaField('constraints_score_mean', 'FLOAT'),
+    bigquery.SchemaField('ground_truth_abs_trans_err', 'FLOAT'),
+    bigquery.SchemaField('ground_truth_abs_trans_err_dev', 'FLOAT'),
+    bigquery.SchemaField('ground_truth_sqr_trans_err', 'FLOAT'),
+    bigquery.SchemaField('ground_truth_sqr_trans_err_dev', 'FLOAT'),
+    bigquery.SchemaField('ground_truth_abs_rot_err', 'FLOAT'),
+    bigquery.SchemaField('ground_truth_abs_rot_err_dev', 'FLOAT'),
+    bigquery.SchemaField('ground_truth_sqr_rot_err', 'FLOAT'),
+    bigquery.SchemaField('ground_truth_sqr_rot_err_dev', 'FLOAT')
 ]

 # Pattern matchers for the various fields of the '/usr/bin/time -v' output
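The eight new FLOAT columns follow one naming scheme: {abs, sqr} x {trans, rot} error, each with a matching _dev column for the reported deviation. As an illustrative sketch only (not part of the change), the same field list could be generated with the google-cloud-bigquery SchemaField class used above:

# Illustrative only: rebuilds the eight ground-truth columns added above.
GROUND_TRUTH_FIELDS = [
    bigquery.SchemaField(
        'ground_truth_{}_{}_err{}'.format(stat, kind, suffix), 'FLOAT')
    for kind in ('trans', 'rot')
    for stat in ('abs', 'sqr')
    for suffix in ('', '_dev')
]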
@@ -77,6 +85,16 @@ CONSTRAINT_STATS_PATTERN = Pattern(
     r'Min:\s+(?P<constraints_score_min>\d+\.\d+)\s+'
     r'Max:\s+(?P<constraints_score_max>\d+\.\d+)\s+'
     r'Mean:\s+(?P<constraints_score_mean>\d+\.\d+)')
+GROUND_TRUTH_STATS_PATTERN = Pattern(
+    r'Result:[\n\r]+'
+    r'Abs translational error (?P<abs_trans_err>\d+\.\d+) '
+    r'\+/- (?P<abs_trans_err_dev>\d+\.\d+) m[\n\r]+'
+    r'Sqr translational error (?P<sqr_trans_err>\d+\.\d+) '
+    r'\+/- (?P<sqr_trans_err_dev>\d+\.\d+) m\^2[\n\r]+'
+    r'Abs rotational error (?P<abs_rot_err>\d+\.\d+) '
+    r'\+/- (?P<abs_rot_err_dev>\d+\.\d+) deg[\n\r]+'
+    r'Sqr rotational error (?P<sqr_rot_err>\d+\.\d+) '
+    r'\+/- (?P<sqr_rot_err_dev>\d+\.\d+) deg\^2')

 # Pattern matcher for extracting the HEAD commit SHA-1 hash.
 GIT_SHA1_PATTERN = Pattern(r'^(?P<sha1>[0-9a-f]{40})\s+HEAD')
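For reference, GROUND_TRUTH_STATS_PATTERN is written against a report of the following shape from the ground truth metrics tool; the numbers below are placeholders, not real results:

# Placeholder values, shown only to illustrate the text shape the regex
# above is designed to match.
SAMPLE_GROUND_TRUTH_OUTPUT = (
    'Result:\n'
    'Abs translational error 0.01 +/- 0.01 m\n'
    'Sqr translational error 0.01 +/- 0.01 m^2\n'
    'Abs rotational error 0.01 +/- 0.01 deg\n'
    'Sqr rotational error 0.01 +/- 0.01 deg^2')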
@@ -119,6 +137,17 @@ def extract_stats(inp):
   return result


+def extract_ground_truth_stats(input_text):
+  """Returns a dictionary of ground truth stats."""
+  result = {}
+  parsed = GROUND_TRUTH_STATS_PATTERN.extract(input_text)
+  for name in ('abs_trans_err', 'sqr_trans_err', 'abs_rot_err', 'sqr_rot_err'):
+    result['ground_truth_{}'.format(name)] = float(parsed[name])
+    result['ground_truth_{}_dev'.format(name)] = float(
+        parsed['{}_dev'.format(name)])
+  return result
+
+
 def retrieve_entity(datastore_client, kind, identifier):
   """Convenience function for Datastore entity retrieval."""
   key = datastore_client.key(kind, identifier)
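A minimal usage sketch for the new helper, assuming the placeholder sample text above and the Pattern.extract() helper already used elsewhere in this file:

stats = extract_ground_truth_stats(SAMPLE_GROUND_TRUTH_OUTPUT)
# stats now holds eight float entries keyed like the new BigQuery columns, e.g.
# {'ground_truth_abs_trans_err': 0.01,
#  'ground_truth_abs_trans_err_dev': 0.01,
#  ...
#  'ground_truth_sqr_rot_err_dev': 0.01}
assert len(stats) == 8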
@@ -132,6 +161,7 @@ def create_job_selector(worker_id, num_workers):
 def run_cmd(cmd):
   """Runs command both printing its stdout output and returning it as string."""
+  print cmd
   p = subprocess.Popen(
       cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
   run_cmd.output = []
@@ -146,6 +176,14 @@ def run_cmd(cmd):
   return '\n'.join(run_cmd.output)


+def run_ros_cmd(ros_distro, ros_cmd):
+  """Runs command similar to run_cmd but sets ROS environment variables."""
+  cmd = ('/bin/bash -c \"source /opt/ros/{}/setup.bash && source '
+         '/opt/cartographer_ros/setup.bash && {}\"').format(
+             ros_distro, ros_cmd)
+  return run_cmd(cmd)
+
+
 class Job(object):
   """Represents a single job to be executed.
@@ -179,20 +217,19 @@ class Job(object):
         rosbag_filename))

     # Creates pbstream
-    output = run_cmd(
-        '/bin/bash -c \"source /opt/ros/{}/setup.bash && source '
-        '/opt/cartographer_ros/setup.bash && /usr/bin/time -v roslaunch '
-        'cartographer_ros {} bag_filenames:={}/{} no_rviz:=true\"'.format(
-            ros_distro, self.launch_file, scratch_dir, rosbag_filename))
+    output = run_ros_cmd(ros_distro,
+                         '/usr/bin/time -v roslaunch cartographer_ros {} '
+                         'bag_filenames:={}/{} no_rviz:=true'.format(
+                             self.launch_file, scratch_dir, rosbag_filename))
+    info = extract_stats(output)

     # Creates assets.
-    run_cmd('/bin/bash -c \"source /opt/ros/{}/setup.bash && source '
-            '/opt/cartographer_ros/setup.bash && /usr/bin/time -v roslaunch '
-            'cartographer_ros {} bag_filenames:={}/{} '
-            'pose_graph_filename:={}/{}.pbstream config_file:={}\"'.format(
-                ros_distro, self.assets_writer_launch_file, scratch_dir,
-                rosbag_filename, scratch_dir, rosbag_filename,
-                self.assets_writer_config_file))
+    run_ros_cmd(
+        ros_distro, '/usr/bin/time -v roslaunch cartographer_ros {} '
+        'bag_filenames:={}/{} pose_graph_filename:='
+        '{}/{}.pbstream config_file:={}'.format(
+            self.assets_writer_launch_file, scratch_dir, rosbag_filename,
+            scratch_dir, rosbag_filename, self.assets_writer_config_file))

     # Copies assets to bucket.
     run_cmd('gsutil cp {}/{}.pbstream '
@@ -201,7 +238,19 @@ class Job(object):
     run_cmd('gsutil cp {}/{}_* gs://cartographer-ci-artifacts/{}/{}/'.format(
         scratch_dir, rosbag_filename, run_id, self.id))

-    info = extract_stats(output)
+    # Download ground truth relations file.
+    run_cmd('gsutil cp gs://cartographer-ci-ground-truth/{}/relations.pb '
+            '{}/relations.pb'.format(self.id, scratch_dir))
+
+    # Calculate metrics.
+    output = run_ros_cmd(ros_distro, 'cartographer_compute_relations_metrics '
+                         '-relations_filename {}/relations.pb '
+                         '-pose_graph_filename {}/{}.pbstream'.format(
+                             scratch_dir, scratch_dir, rosbag_filename))
+
+    # Add ground truth stats.
+    info.update(extract_ground_truth_stats(output))

     info['rosbag'] = rosbag_filename
     return info
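Taken together, the ground truth addition to Job.run is three steps: fetch the per-job relations file from the cartographer-ci-ground-truth bucket, run cartographer_compute_relations_metrics against the freshly built pbstream, and fold the parsed errors into the stats dict. A condensed sketch of that flow, using the same names as the diff above:

relations = '{}/relations.pb'.format(scratch_dir)
pbstream = '{}/{}.pbstream'.format(scratch_dir, rosbag_filename)
# Pull the ground truth relations for this job from the bucket.
run_cmd('gsutil cp gs://cartographer-ci-ground-truth/{}/relations.pb {}'.format(
    self.id, relations))
# Compare the optimized pose graph against the ground truth relations.
output = run_ros_cmd(ros_distro,
                     'cartographer_compute_relations_metrics '
                     '-relations_filename {} -pose_graph_filename {}'.format(
                         relations, pbstream))
# Adds the eight ground_truth_* entries alongside the timing/constraint stats.
info.update(extract_ground_truth_stats(output))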
@@ -249,7 +298,16 @@ def publish_stats_to_big_query(stats_dict, now, head_sha1):
         job_info['wall_time_secs'], job_info['max_set_size_kbytes'],
         job_info['constraints_count'], job_info['constraints_score_min'],
         job_info['constraints_score_max'],
-        job_info['constraints_score_mean'])
+        job_info['constraints_score_mean'],
+        job_info['ground_truth_abs_trans_err'],
+        job_info['ground_truth_abs_trans_err_dev'],
+        job_info['ground_truth_sqr_trans_err'],
+        job_info['ground_truth_sqr_trans_err_dev'],
+        job_info['ground_truth_abs_rot_err'],
+        job_info['ground_truth_abs_rot_err_dev'],
+        job_info['ground_truth_sqr_rot_err'],
+        job_info['ground_truth_sqr_rot_err_dev'])
     rows_to_insert.append(data)

   errors = bigquery_client.create_rows(