Remove legacy Jenkins quality pipeline. (#1510)

- it has only ever been used at Google for a quality pipeline
- the worker.py is Python 2 and closely tied to Google Cloud, so I don't
  believe there is much for anyone else to reuse nowadays, and it's not
  worth the effort to port it to Python 3.

Signed-off-by: Michael Grupp <grupp@magazino.eu>
Branch: master
Michael Grupp, 2020-08-28 15:08:15 +02:00, committed by GitHub
parent f30d1f7e1e
commit 051a018c47
4 changed files with 0 additions and 666 deletions

jenkins/Dockerfile.kinetic
@@ -1,122 +0,0 @@
# Copyright 2016 The Cartographer Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
FROM ros:kinetic
ARG CARTOGRAPHER_VERSION=master
# Xenial's base image doesn't ship with sudo.
RUN apt-get update && apt-get install -y sudo time && rm -rf /var/lib/apt/lists/*
# First, we invalidate the entire cache if cartographer-project/cartographer has
# changed. This file's content changes whenever master changes. See:
# http://stackoverflow.com/questions/36996046/how-to-prevent-dockerfile-caching-git-clone
ADD https://api.github.com/repos/cartographer-project/cartographer/git/refs/heads/master \
    cartographer_ros/cartographer_version.json
# wstool needs the updated rosinstall file to clone the correct repos.
COPY cartographer_ros.rosinstall cartographer_ros/
COPY scripts/prepare_jenkins_catkin_workspace.sh cartographer_ros/scripts/
# Invalidates the Docker cache to ensure this command is always executed.
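# (The Jenkinsfile builds this image with --build-arg CACHEBUST=$(date +%s),
# so the ARG value, and therefore every layer below it, changes on each build.)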
ARG CACHEBUST=1
RUN CARTOGRAPHER_VERSION=$CARTOGRAPHER_VERSION \
    cartographer_ros/scripts/prepare_jenkins_catkin_workspace.sh
# rosdep needs the updated package.xml files to install the correct debs.
COPY cartographer_ros/package.xml catkin_ws/src/cartographer_ros/cartographer_ros/
COPY cartographer_ros_msgs/package.xml catkin_ws/src/cartographer_ros/cartographer_ros_msgs/
COPY cartographer_rviz/package.xml catkin_ws/src/cartographer_ros/cartographer_rviz/
COPY scripts/install_debs.sh cartographer_ros/scripts/
RUN cartographer_ros/scripts/install_debs.sh && rm -rf /var/lib/apt/lists/*
# Install proto3.
RUN /catkin_ws/src/cartographer/scripts/install_proto3.sh
# Build, install, and test all packages individually to allow caching. The
# ordering of these steps must match the topological package ordering as
# determined by Catkin.
COPY scripts/install.sh cartographer_ros/scripts/
COPY scripts/catkin_test_results.sh cartographer_ros/scripts/
COPY cartographer_ros_msgs catkin_ws/src/cartographer_ros/cartographer_ros_msgs/
RUN cartographer_ros/scripts/install.sh --pkg cartographer_ros_msgs && \
    cartographer_ros/scripts/install.sh --pkg cartographer_ros_msgs \
      --catkin-make-args run_tests && \
    cartographer_ros/scripts/catkin_test_results.sh build_isolated/cartographer_ros_msgs
RUN cartographer_ros/scripts/install.sh --pkg ceres-solver
RUN cartographer_ros/scripts/install.sh --pkg cartographer && \
    cartographer_ros/scripts/install.sh --pkg cartographer --make-args test
COPY cartographer_ros catkin_ws/src/cartographer_ros/cartographer_ros/
RUN cartographer_ros/scripts/install.sh --pkg cartographer_ros && \
    cartographer_ros/scripts/install.sh --pkg cartographer_ros \
      --catkin-make-args run_tests && \
    cartographer_ros/scripts/catkin_test_results.sh build_isolated/cartographer_ros
COPY cartographer_rviz catkin_ws/src/cartographer_ros/cartographer_rviz/
RUN cartographer_ros/scripts/install.sh --pkg cartographer_rviz && \
    cartographer_ros/scripts/install.sh --pkg cartographer_rviz \
      --catkin-make-args run_tests && \
    cartographer_ros/scripts/catkin_test_results.sh build_isolated/cartographer_rviz
RUN cartographer_ros/scripts/install.sh --pkg cartographer_toru
RUN cartographer_ros/scripts/install.sh --pkg cartographer_fetch
COPY scripts/ros_entrypoint.sh /
# A BTRFS bug may prevent us from cleaning up these directories.
# https://btrfs.wiki.kernel.org/index.php/Problem_FAQ#I_cannot_delete_an_empty_directory
RUN rm -rf cartographer_ros catkin_ws || true
RUN sudo apt-get update && sudo apt-get -y install openjdk-8-jdk python-pip
ENV HOME /home/jenkins
RUN addgroup --system --gid 10000 jenkins
RUN adduser --system --ingroup jenkins --home $HOME --uid 10000 jenkins
LABEL Description="This is a base image, which provides the Jenkins agent executable (slave.jar)" Vendor="Jenkins project" Version="3.17"
ARG VERSION=3.17
ARG AGENT_WORKDIR=/home/jenkins/agent
RUN curl --create-dirs -sSLo /usr/share/jenkins/slave.jar https://repo.jenkins-ci.org/public/org/jenkins-ci/main/remoting/${VERSION}/remoting-${VERSION}.jar \
    && chmod 755 /usr/share/jenkins \
    && chmod 644 /usr/share/jenkins/slave.jar
# USER jenkins
ENV AGENT_WORKDIR=${AGENT_WORKDIR}
RUN mkdir /home/jenkins/.jenkins && mkdir -p ${AGENT_WORKDIR}
VOLUME /home/jenkins/.jenkins
VOLUME ${AGENT_WORKDIR}
WORKDIR /home/jenkins
COPY jenkins/jenkins-slave /usr/local/bin/jenkins-slave
ENV CLOUDSDK_CORE_DISABLE_PROMPTS 1
ENV PATH /opt/google-cloud-sdk/bin:$PATH
USER root
# Install Google Cloud Components
RUN curl https://sdk.cloud.google.com | bash && mv google-cloud-sdk /opt
RUN gcloud components install kubectl
RUN pip install --upgrade google-cloud-datastore
RUN pip install --upgrade google-cloud-bigquery
COPY jenkins/worker.py /worker.py
# USER root
ENTRYPOINT ["jenkins-slave"]

jenkins/Jenkinsfile
@@ -1,110 +0,0 @@
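// Overview: node-0 builds and pushes the kinetic-jenkins-slave image defined
// in jenkins/Dockerfile.kinetic; node-1 to node-3 then run the fetch,
// backpack and toru quality pipelines inside that image via /worker.py.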
podTemplate(label: 'node-0', containers: [
    containerTemplate(
        name: 'jnlp',
        image: 'eggsy84/gcp-jenkins-slave-k8s-seed:latest',
        ttyEnabled: false,
        command: '',
        privileged: true,
        alwaysPullImage: false,
        workingDir: '/home/jenkins',
        args: '${computer.jnlpmac} ${computer.name}'
    )
  ],
  volumes: [
    secretVolume(mountPath: '/opt/config', secretName: 'gcloud-svc-account'),
    hostPathVolume(mountPath: '/var/run/docker.sock', hostPath: '/var/run/docker.sock'),
    persistentVolumeClaim(claimName: 'data-claim-compile', mountPath: '/data'),
  ]
) {
  node('node-0') {
    stage('Compile') {
      sh 'gcloud auth activate-service-account --key-file=/opt/config/gcloud-svc-account.json'
      sh 'cd /data && rm -Rf *'
      sh 'cd /data && git clone https://github.com/cartographer-project/cartographer_ros'
      sh 'cd /data/cartographer_ros && docker build -f jenkins/Dockerfile.kinetic -t kinetic-jenkins-slave --build-arg CACHEBUST=$(date +%s) .'
    }
    stage('Push') {
      sh 'docker tag kinetic-jenkins-slave eu.gcr.io/cartographer-141408/kinetic-jenkins-slave'
      sh 'gcloud docker -- push eu.gcr.io/cartographer-141408/kinetic-jenkins-slave'
      sh 'cd /data && rm -Rf *'
    }
  }
}
podTemplate(label: 'node-1', containers: [
    containerTemplate(
        name: 'jnlp',
        image: 'eu.gcr.io/cartographer-141408/kinetic-jenkins-slave:latest',
        ttyEnabled: false,
        command: '',
        privileged: true,
        alwaysPullImage: true,
        workingDir: '/home/jenkins',
        args: '${computer.jnlpmac} ${computer.name}'
    )
  ],
  volumes: [
    secretVolume(mountPath: '/opt/config', secretName: 'gcloud-svc-account'),
    hostPathVolume(mountPath: '/var/run/docker.sock', hostPath: '/var/run/docker.sock'),
    persistentVolumeClaim(claimName: 'data-claim-compile', mountPath: '/data'),
  ]
) {
  node('node-1') {
    stage('Run Fetch Pipeline') {
      sh 'gcloud auth activate-service-account --key-file=/opt/config/gcloud-svc-account.json'
      sh 'GOOGLE_APPLICATION_CREDENTIALS="/opt/config/gcloud-svc-account.json" GOOGLE_CLOUD_DISABLE_GRPC=True python /worker.py --worker_id 0 --num_workers 1 --pipeline_id fetch'
    }
  }
}
podTemplate(label: 'node-2', containers: [
    containerTemplate(
        name: 'jnlp',
        image: 'eu.gcr.io/cartographer-141408/kinetic-jenkins-slave:latest',
        ttyEnabled: false,
        command: '',
        privileged: true,
        alwaysPullImage: true,
        workingDir: '/home/jenkins',
        args: '${computer.jnlpmac} ${computer.name}'
    )
  ],
  volumes: [
    secretVolume(mountPath: '/opt/config', secretName: 'gcloud-svc-account'),
    hostPathVolume(mountPath: '/var/run/docker.sock', hostPath: '/var/run/docker.sock'),
    persistentVolumeClaim(claimName: 'data-claim-compile', mountPath: '/data'),
  ]
) {
  node('node-2') {
    stage('Run Backpack Pipeline') {
      sh 'gcloud auth activate-service-account --key-file=/opt/config/gcloud-svc-account.json'
      sh 'GOOGLE_APPLICATION_CREDENTIALS="/opt/config/gcloud-svc-account.json" GOOGLE_CLOUD_DISABLE_GRPC=True python /worker.py --worker_id 0 --num_workers 1 --pipeline_id backpack'
    }
  }
}
podTemplate(label: 'node-3', containers: [
    containerTemplate(
        name: 'jnlp',
        image: 'eu.gcr.io/cartographer-141408/kinetic-jenkins-slave:latest',
        ttyEnabled: false,
        command: '',
        privileged: true,
        alwaysPullImage: true,
        workingDir: '/home/jenkins',
        args: '${computer.jnlpmac} ${computer.name}'
    )
  ],
  volumes: [
    secretVolume(mountPath: '/opt/config', secretName: 'gcloud-svc-account'),
    hostPathVolume(mountPath: '/var/run/docker.sock', hostPath: '/var/run/docker.sock'),
    persistentVolumeClaim(claimName: 'data-claim-compile', mountPath: '/data'),
  ]
) {
  node('node-3') {
    stage('Run Toru Pipeline') {
      sh 'gcloud auth activate-service-account --key-file=/opt/config/gcloud-svc-account.json'
      sh 'GOOGLE_APPLICATION_CREDENTIALS="/opt/config/gcloud-svc-account.json" GOOGLE_CLOUD_DISABLE_GRPC=True python /worker.py --worker_id 0 --num_workers 1 --pipeline_id toru'
    }
  }
}

jenkins/jenkins-slave
@@ -1,84 +0,0 @@
#!/usr/bin/env sh
# The MIT License
#
# Copyright (c) 2015, CloudBees, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Usage: jenkins-slave.sh [options] -url http://jenkins [SECRET] [AGENT_NAME]
# Optional environment variables:
# * JENKINS_TUNNEL : HOST:PORT for a tunnel to route TCP traffic to jenkins host, when jenkins can't be directly accessed over network
# * JENKINS_URL : alternate jenkins URL
# * JENKINS_SECRET : agent secret, if not set as an argument
# * JENKINS_AGENT_NAME : agent name, if not set as an argument
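# Example (hypothetical values):
#   jenkins-slave -url http://jenkins:8080 <secret> <agent name>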
if [ $# -eq 1 ]; then
  # if `docker run` only has one argument, we assume the user is running an
  # alternate command like `bash` to inspect the image
  exec "$@"
else
  # if -tunnel is not provided try env vars
  case "$@" in
    *"-tunnel "*) ;;
    *)
      if [ ! -z "$JENKINS_TUNNEL" ]; then
        TUNNEL="-tunnel $JENKINS_TUNNEL"
      fi ;;
  esac

  if [ -n "$JENKINS_URL" ]; then
    URL="-url $JENKINS_URL"
  fi

  if [ -n "$JENKINS_NAME" ]; then
    JENKINS_AGENT_NAME="$JENKINS_NAME"
  fi

  if [ -z "$JNLP_PROTOCOL_OPTS" ]; then
    echo "Warning: JnlpProtocol3 is disabled by default, use JNLP_PROTOCOL_OPTS to alter the behavior"
    JNLP_PROTOCOL_OPTS="-Dorg.jenkinsci.remoting.engine.JnlpProtocol3.disabled=true"
  fi

  # If both required options are defined, do not pass the parameters
  OPT_JENKINS_SECRET=""
  if [ -n "$JENKINS_SECRET" ]; then
    case "$@" in
      *"${JENKINS_SECRET}"*) echo "Warning: SECRET is defined twice in command-line arguments and the environment variable" ;;
      *)
        OPT_JENKINS_SECRET="${JENKINS_SECRET}" ;;
    esac
  fi

  OPT_JENKINS_AGENT_NAME=""
  if [ -n "$JENKINS_AGENT_NAME" ]; then
    case "$@" in
      *"${JENKINS_AGENT_NAME}"*) echo "Warning: AGENT_NAME is defined twice in command-line arguments and the environment variable" ;;
      *)
        OPT_JENKINS_AGENT_NAME="${JENKINS_AGENT_NAME}" ;;
    esac
  fi

  # TODO: Handle the case when the command line and environment variable
  # contain different values. It is fine if it blows up for now since it
  # should lead to an error anyway.
  exec java $JAVA_OPTS $JNLP_PROTOCOL_OPTS -cp /usr/share/jenkins/slave.jar hudson.remoting.jnlp.Main -headless $TUNNEL $URL $OPT_JENKINS_SECRET $OPT_JENKINS_AGENT_NAME "$@"
fi

jenkins/worker.py
@@ -1,350 +0,0 @@
"""This is the script executed by workers of the quality control pipline."""
import argparse
import datetime
from os.path import basename
from pprint import pprint
import re
import subprocess
from google.cloud import bigquery
from google.cloud import datastore


class Pattern(object):
  """Defines a pattern for regular expression matching."""

  def __init__(self, pattern):
    self.regex = re.compile(pattern, re.MULTILINE)

  def extract(self, text):
    """Returns a dictionary of named capture groups to extracted output.

    Args:
      text: input to parse

    Returns an empty dict if no match was found.
    """
    match = self.regex.search(text)
    if match is None:
      return {}
    return match.groupdict()

  def extract_last_occurence(self, text):
    """Returns tuple of extracted outputs.

    Args:
      text: input to parse

    Returns the information extracted from the last match. Returns
    None if no match was found.
    """
    matches = self.regex.findall(text)
    if not matches:
      return None
    return matches[-1]
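
# Example (illustrative input):
#   p = Pattern(r'^\s*User time \(seconds\): (?P<user_time>\d+.\d+|\d+)')
#   p.extract('\tUser time (seconds): 12.34')  # -> {'user_time': '12.34'}
# extract_last_occurence instead returns the regex groups of the last match.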

# BigQuery table schema
SCHEMA = [
    bigquery.SchemaField('date', 'DATE'),
    bigquery.SchemaField('commit_sha1', 'STRING'),
    bigquery.SchemaField('job_id', 'INTEGER'),
    bigquery.SchemaField('rosbag', 'STRING'),
    bigquery.SchemaField('user_time_secs', 'FLOAT'),
    bigquery.SchemaField('system_time_secs', 'FLOAT'),
    bigquery.SchemaField('wall_time_secs', 'FLOAT'),
    bigquery.SchemaField('max_set_size_kbytes', 'INTEGER'),
    bigquery.SchemaField('constraints_count', 'INTEGER'),
    bigquery.SchemaField('constraints_score_minimum', 'FLOAT'),
    bigquery.SchemaField('constraints_score_maximum', 'FLOAT'),
    bigquery.SchemaField('constraints_score_mean', 'FLOAT'),
    bigquery.SchemaField('ground_truth_abs_trans_err', 'FLOAT'),
    bigquery.SchemaField('ground_truth_abs_trans_err_dev', 'FLOAT'),
    bigquery.SchemaField('ground_truth_sqr_trans_err', 'FLOAT'),
    bigquery.SchemaField('ground_truth_sqr_trans_err_dev', 'FLOAT'),
    bigquery.SchemaField('ground_truth_abs_rot_err', 'FLOAT'),
    bigquery.SchemaField('ground_truth_abs_rot_err_dev', 'FLOAT'),
    bigquery.SchemaField('ground_truth_sqr_rot_err', 'FLOAT'),
    bigquery.SchemaField('ground_truth_sqr_rot_err_dev', 'FLOAT')
]

# Pattern matchers for the various fields of the '/usr/bin/time -v' output
USER_TIME_PATTERN = Pattern(
    r'^\s*User time \(seconds\): (?P<user_time>\d+.\d+|\d+)')
SYSTEM_TIME_PATTERN = Pattern(
    r'^\s*System time \(seconds\): (?P<system_time>\d+.\d+|\d+)')
WALL_TIME_PATTERN = Pattern(
    r'^\s*Elapsed \(wall clock\) time \(h:mm:ss or m:ss\): '
    r'((?P<hours>\d{1,2}):|)(?P<minutes>\d{1,2}):(?P<seconds>\d{2}\.\d{2})')
MAX_RES_SET_SIZE_PATTERN = Pattern(
    r'^\s*Maximum resident set size \(kbytes\): (?P<max_set_size>\d+)')
CONSTRAINT_STATS_PATTERN = Pattern(
    r'Score histogram:[\n\r]+'
    r'Count:\s+(?P<constraints_count>\d+)\s+'
    r'Min:\s+(?P<constraints_score_min>\d+\.\d+)\s+'
    r'Max:\s+(?P<constraints_score_max>\d+\.\d+)\s+'
    r'Mean:\s+(?P<constraints_score_mean>\d+\.\d+)')
GROUND_TRUTH_STATS_PATTERN = Pattern(
    r'Result:[\n\r]+'
    r'Abs translational error (?P<abs_trans_err>\d+\.\d+) '
    r'\+/- (?P<abs_trans_err_dev>\d+\.\d+) m[\n\r]+'
    r'Sqr translational error (?P<sqr_trans_err>\d+\.\d+) '
    r'\+/- (?P<sqr_trans_err_dev>\d+\.\d+) m\^2[\n\r]+'
    r'Abs rotational error (?P<abs_rot_err>\d+\.\d+) '
    r'\+/- (?P<abs_rot_err_dev>\d+\.\d+) deg[\n\r]+'
    r'Sqr rotational error (?P<sqr_rot_err>\d+\.\d+) '
    r'\+/- (?P<sqr_rot_err_dev>\d+\.\d+) deg\^2')
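
# For reference, '/usr/bin/time -v' prints lines like (values are examples):
#   User time (seconds): 205.53
#   System time (seconds): 2.14
#   Elapsed (wall clock) time (h:mm:ss or m:ss): 3:27.67
#   Maximum resident set size (kbytes): 1234567
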
# Pattern matcher for extracting the HEAD commit SHA-1 hash.
GIT_SHA1_PATTERN = Pattern(r'^(?P<sha1>[0-9a-f]{40})\s+HEAD')
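# `git ls-remote <repo>` prints lines of the form '<40 hex digit sha1>\tHEAD',
# which is what GIT_SHA1_PATTERN matches.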


def get_head_git_sha1():
  """Returns the SHA-1 hash of the commit tagged HEAD."""
  output = subprocess.check_output([
      'git', 'ls-remote',
      'https://github.com/cartographer-project/cartographer.git'
  ])
  parsed = GIT_SHA1_PATTERN.extract(output)
  return parsed['sha1']


def extract_stats(inp):
  """Returns a dictionary of stats."""
  result = {}
  parsed = USER_TIME_PATTERN.extract(inp)
  result['user_time_secs'] = float(parsed['user_time'])
  parsed = SYSTEM_TIME_PATTERN.extract(inp)
  result['system_time_secs'] = float(parsed['system_time'])
  parsed = WALL_TIME_PATTERN.extract(inp)
  result['wall_time_secs'] = float(parsed['hours'] or 0.) * 3600 + float(
      parsed['minutes']) * 60 + float(parsed['seconds'])
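  # e.g. 'Elapsed (wall clock) time (h:mm:ss or m:ss): 1:02:03.45' parses to
  # hours=1, minutes=2, seconds=3.45, i.e.
  # 1 * 3600 + 2 * 60 + 3.45 = 3723.45 wall clock seconds.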
  parsed = MAX_RES_SET_SIZE_PATTERN.extract(inp)
  result['max_set_size_kbytes'] = int(parsed['max_set_size'])
  parsed = CONSTRAINT_STATS_PATTERN.extract_last_occurence(inp)
  print parsed
  result['constraints_count'] = int(parsed[0])
  result['constraints_score_min'] = float(parsed[1])
  result['constraints_score_max'] = float(parsed[2])
  result['constraints_score_mean'] = float(parsed[3])
  return result


def extract_ground_truth_stats(input_text):
  """Returns a dictionary of ground truth stats."""
  result = {}
  parsed = GROUND_TRUTH_STATS_PATTERN.extract(input_text)
  for name in ('abs_trans_err', 'sqr_trans_err', 'abs_rot_err', 'sqr_rot_err'):
    result['ground_truth_{}'.format(name)] = float(parsed[name])
    result['ground_truth_{}_dev'.format(name)] = float(
        parsed['{}_dev'.format(name)])
  return result


def retrieve_entity(datastore_client, kind, identifier):
  """Convenience function for Datastore entity retrieval."""
  key = datastore_client.key(kind, identifier)
  return datastore_client.get(key)


def create_job_selector(worker_id, num_workers):
  """Constructs a round-robin job selector."""
  return lambda job_id: job_id % num_workers == worker_id
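
# e.g. with num_workers=3, the selector for worker_id=1 accepts job indices
# 1, 4, 7, ... while workers 0 and 2 take the remaining jobs.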


def run_cmd(cmd):
  """Runs command both printing its stdout output and returning it as string."""
  print cmd
  p = subprocess.Popen(
      cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
  run_cmd.output = []

  def process(line):
    run_cmd.output.append(line)
    print line.rstrip()

  while p.poll() is None:
    process(p.stdout.readline())
  process(p.stdout.read())
  return '\n'.join(run_cmd.output)


def run_ros_cmd(ros_distro, ros_cmd):
  """Runs command similar to run_cmd but sets ROS environment variables."""
  cmd = ('/bin/bash -c \"source /opt/ros/{}/setup.bash && source '
         '/opt/cartographer_ros/setup.bash && {}\"').format(
             ros_distro, ros_cmd)
  return run_cmd(cmd)


class Job(object):
  """Represents a single job to be executed.

  A job consists of a combination of rosbag and configuration and launch files.
  """

  def __init__(self, datastore_client, job_id):
    self.id = job_id
    entity = retrieve_entity(datastore_client, 'Job', job_id)
    self.launch_file = entity['launch_file']
    self.assets_writer_launch_file = entity['assets_writer_launch_file']
    self.assets_writer_config_file = entity['assets_writer_config_file']
    self.rosbag = entity['rosbag']
    self.ros_package = entity['ros_package']

  def __repr__(self):
    return 'Job: id : {} launch_file: {} rosbag: {}'.format(
        self.id, self.launch_file, self.rosbag)

  def run(self, ros_distro, run_id):
    """Runs the job with ROS distro 'ros_distro'."""
    print 'running job {}'.format(self.id)
    # Garbage collect any left-overs from previous runs.
    run_cmd('rm -rf /data/*')
    # Copy the rosbag to scratch space
    scratch_dir = '/data/{}'.format(self.id)
    rosbag_filename = basename(self.rosbag)
    run_cmd('mkdir {}'.format(scratch_dir))
    run_cmd('gsutil cp gs://{} {}/{}'.format(self.rosbag, scratch_dir,
                                             rosbag_filename))
    # Creates pbstream
    output = run_ros_cmd(ros_distro,
                         '/usr/bin/time -v roslaunch {} {} '
                         'bag_filenames:={}/{} no_rviz:=true'.format(
                             self.ros_package, self.launch_file, scratch_dir,
                             rosbag_filename))
    info = extract_stats(output)
    # Creates assets.
    run_ros_cmd(
        ros_distro, '/usr/bin/time -v roslaunch {} {} '
        'bag_filenames:={}/{} pose_graph_filename:='
        '{}/{}.pbstream config_file:={}'.format(
            self.ros_package, self.assets_writer_launch_file, scratch_dir,
            rosbag_filename, scratch_dir, rosbag_filename,
            self.assets_writer_config_file))
    # Copies assets to bucket.
    run_cmd('gsutil cp {}/{}.pbstream '
            'gs://cartographer-ci-artifacts/{}/{}/{}.pbstream'.format(
                scratch_dir, rosbag_filename, run_id, self.id,
                rosbag_filename))
    run_cmd('gsutil cp {}/{}_* gs://cartographer-ci-artifacts/{}/{}/'.format(
        scratch_dir, rosbag_filename, run_id, self.id))
    # Download ground truth relations file.
    run_cmd('gsutil cp gs://cartographer-ci-ground-truth/{}/relations.pb '
            '{}/relations.pb'.format(self.id, scratch_dir))
    # Calculate metrics.
    output = run_ros_cmd(ros_distro, 'cartographer_compute_relations_metrics '
                         '-relations_filename {}/relations.pb '
                         '-pose_graph_filename {}/{}.pbstream'.format(
                             scratch_dir, scratch_dir, rosbag_filename))
    # Add ground truth stats.
    info.update(extract_ground_truth_stats(output))
    info['rosbag'] = rosbag_filename
    return info


class Worker(object):
  """Represents a single worker that executes a sequence of Jobs."""

  def __init__(self, datastore_client, pipeline_id, run_id):
    entity = retrieve_entity(datastore_client, 'PipelineConfig', pipeline_id)
    self.pipeline_id = pipeline_id
    self.jobs = [Job(datastore_client, job_id) for job_id in entity['jobs']]
    self.scratch_dir = entity['scratch_dir']
    self.ros_distro = entity['ros_distro']
    self.run_id = run_id

  def __repr__(self):
    result = 'Worker: pipeline_id: {}\n'.format(self.pipeline_id)
    for job in self.jobs:
      result += '{}\n'.format(str(job))
    return result

  def run_jobs(self, selector):
    outputs = {}
    for idx, job in enumerate(self.jobs):
      if selector(idx):
        output = job.run(self.ros_distro, self.run_id)
        outputs[job.id] = output
      else:
        print 'job {}: skip'.format(job.id)
    return outputs


def publish_stats_to_big_query(stats_dict, now, head_sha1):
  """Publishes metrics to BigQuery."""
  bigquery_client = bigquery.Client()
  dataset = bigquery_client.dataset('Cartographer')
  table = dataset.table('metrics')
  rows_to_insert = []
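  # Each row is a plain tuple; its field order must match SCHEMA above.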
  for job_identifier, job_info in stats_dict.iteritems():
    print job_info
    data = ('{}-{}-{}'.format(now.year, now.month, now.day),
            head_sha1, job_identifier, job_info['rosbag'],
            job_info['user_time_secs'], job_info['system_time_secs'],
            job_info['wall_time_secs'], job_info['max_set_size_kbytes'],
            job_info['constraints_count'], job_info['constraints_score_min'],
            job_info['constraints_score_max'],
            job_info['constraints_score_mean'],
            job_info['ground_truth_abs_trans_err'],
            job_info['ground_truth_abs_trans_err_dev'],
            job_info['ground_truth_sqr_trans_err'],
            job_info['ground_truth_sqr_trans_err_dev'],
            job_info['ground_truth_abs_rot_err'],
            job_info['ground_truth_abs_rot_err_dev'],
            job_info['ground_truth_sqr_rot_err'],
            job_info['ground_truth_sqr_rot_err_dev'])
    rows_to_insert.append(data)
  errors = bigquery_client.create_rows(
      table, rows_to_insert, selected_fields=SCHEMA)
  if not errors:
    print 'Pushed {} row(s) into Cartographer:metrics'.format(
        len(rows_to_insert))
  else:
    print 'Errors:'
    pprint(errors)


def parse_arguments():
  """Parses the command line arguments."""
  parser = argparse.ArgumentParser(
      description=__doc__,
      formatter_class=argparse.RawDescriptionHelpFormatter)
  parser.add_argument('--worker_id', type=int)
  parser.add_argument('--num_workers', type=int)
  parser.add_argument('--pipeline_id', type=str)
  return parser.parse_args()
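
# Example invocation (as in jenkins/Jenkinsfile):
#   python worker.py --worker_id 0 --num_workers 1 --pipeline_id fetch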


def main():
  args = parse_arguments()
  ds_client = datastore.Client()
  job_selector = create_job_selector(int(args.worker_id),
                                     int(args.num_workers))
  head_sha1 = get_head_git_sha1()
  now = datetime.datetime.now()
  pipeline_run_id = '{}-{}-{}_{}'.format(now.year, now.month, now.day,
                                         head_sha1)
  worker = Worker(ds_client, args.pipeline_id, pipeline_run_id)
  stats_dict = worker.run_jobs(job_selector)
  publish_stats_to_big_query(stats_dict, now, head_sha1)


if __name__ == '__main__':
  main()