#!/usr/bin/python
#
# jenkinstool -- manipulate jobs in a running Jenkins instance
#
# Copyright 2012 Lars Wirzenius
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.


import cliapp
import json
import logging
import StringIO
import time
import ttystatus
from xml.etree import ElementTree as ET

import simplejenkinsapi


have_vcs_copy = '''
have_vcs_copy=yes
'''

no_vcs_copy = '''
have_vcs_copy=no
'''

prelude = '''
set -eu

if %(global.pretend)s; then exit 0; fi

url_exists()
{
    wget -q -O /dev/null "$1"
}

get_debian_changelog_version()
{
    # Can't use dpkg-parsechangelog, since the Jenkins master node does not
    # have it (installing it would drag in a lot of extra stuff).
    head -n 1 debian/changelog | sed -e 's/^[^(]*(//' -e 's/).*//'
}

upstream_version()
{
    if [ -e setup.py ]
    then
        python setup.py --version && return
    fi
    if [ -e debian/changelog ]
    then
        get_debian_changelog_version | sed 's/-[^-]*$//'
        return
    fi
    echo "Can't figure out upstream version" 1>&2
    exit 1
}

commit_date()
{
    if [ -e .bzr ]
    then
        commit_timestamp="$(bzr log -l1 --timezone=utc | sed -n '/^timestamp: /s///p')"
    elif [ -e .git ]
    then
        commit_timestamp="$(git log -1 --date=iso | sed -n '/^Date:/s///p')"
    else
        echo "UNKNOWN-COMMIT-TIME"
        return
    fi
    date -u --date="$commit_timestamp" +%%Y%%m%%dT%%H%%M%%S
}

is_release()
{
    local project="$1"
    local version="$2"
    if [ -d .bzr ]
    then
        if bzr log -l1 | sed -n '/^tags: /s///p' | tr ',' '\n' | tr -d ' ' |
            grep -Fx "${project}-${version}"
        then
            return 0
        else
            return 1
        fi
    elif [ -d .git ]
    then
        local output="$(git name-rev --tags --name-only HEAD)"
        case "$output" in
            "${project}-${version}^0") return 0 ;;  # annotated tag
            "${project}-${version}") return 0 ;;  # plain tag
            *) return 1 ;;
        esac
    else
        echo "Unknown version control system" 1>&2
        exit 1
    fi
}

project="%(project.name)s"
artifacts="%(global.artifacts)s/${project}"
project_artifacts="$artifacts/$project"
project_url="%(global.artifacts-url)s/$project"
pbuilder_ci_tgz=/var/cache/pbuilder/ci.tgz
pbuilder_release_tgz=/var/cache/pbuilder/release.tgz

vcsproject="${project}_localhost_vcs-update"
if [ -d "/var/lib/jenkins/jobs/${vcsproject}/workspace" ]
then
    vcsworkspace="/var/lib/jenkins/jobs/${vcsproject}/workspace"
else
    vcsworkspace="/var/lib/jenkins/workspace/${vcsproject}"
fi

workdir="$(pwd)"
if [ -e "$vcsworkspace" ] || [ "$have_vcs_copy" = yes ]
then
    if [ -e "$vcsworkspace" ]
    then
        cd "$vcsworkspace"
    fi

    version="$(upstream_version)"
    if ! is_release "$project" "$version"
    then
        version="$version.0.vcs$(commit_date)"
    fi
    tarball="${project}-${version}.tar.gz"
    tarball_url="${project_url}/$tarball"

    if [ -d debian ]
    then
        if is_release "$project" "$version"
        then
            debianversion="$(get_debian_changelog_version)"
            case "$debianversion" in
                $version-*|$version)
                    echo "Upstream version $version and " \
                        "debian/changelog version $debianversion" \
                        "match, good"
                    ;;
                *)
                    echo "Upstream version $version and " \
                        "debian/changelog version $debianversion" \
                        "do NOT match, bad" 1>&2
                    exit 1
                    ;;
            esac
            basetgz="$pbuilder_release_tgz"
            debian_dist="%(host.debian-dist)s"
            if [ "$debian_dist" != unstable ]
            then
                debianversion="$debianversion.$debian_dist"
            fi
        else
            basetgz="$pbuilder_ci_tgz"
            debianversion="${version}-1.%(host.debian-dist)s"
            debian_dist="%(host.debian-dist)s-ci"
        fi

        debianurl="%(global.artifacts-url)s/debian"
        sourcepkg="$(head -n1 debian/control | awk '/^Source:/ { print $2 }')"
        case "$sourcepkg" in
            lib*) subdir=$(echo "$sourcepkg" | cut -c1-4) ;;
            *) subdir=$(echo "$sourcepkg" | cut -c1) ;;
        esac
        poolpath="pool/main/$subdir/$sourcepkg"
        poolurl="%(global.artifacts-url)s/debian/$poolpath"
        origtgz="${sourcepkg}_${version}.orig.tar.gz"
        changes="${sourcepkg}_${debianversion}_source.changes"
        dsc="${sourcepkg}_$debianversion.dsc"
        dsc_url="$poolurl/$dsc"
        debian_dsc_url="http://cdn.debian.net/debian/$poolpath/$dsc"
        archany_changes="${sourcepkg}_${debianversion}_%(host.arch)s.changes"
        archall_changes="${sourcepkg}_${debianversion}_all.changes"
    fi
    cd "$workdir"
else
    echo "$vcsworkspace does not exist, not setting version, etc"
fi
'''


class JobGenerator(object):

    '''Generate Jenkins jobs from a JSON specification file.'''

    def __init__(self, pretend, pbuilder_max_age, artifacts_url,
                 jenkins_host, debian_mirror):
        self.pretend = pretend
        self.pbuilder_max_age = pbuilder_max_age
        self.artifacts_url = artifacts_url
        self.jenkins_host = jenkins_host
        self.debian_mirror = debian_mirror

    def generate_setup_jobs(self, config):
        '''Generate all the shared jobs to set up Jenkins.

        Return list of (job_id, config_xml) pairs.

        '''

        jobs = []

        # Set up reprepro on the Jenkins host.
        reprepro = self.reprepro_setup_job(config)
        jobs.append(reprepro)

        # Create or update pbuilder tarballs.
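        # One pbuilder job per build host: each host maintains both a CI
        # and a release base tarball (see pbuilder_create_job below).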
        for host in config['hosts']:
            jc = self.pbuilder_create_job(host)
            jobs.append(jc)

        return [(jc.job_id(), jc.tostring()) for jc in jobs]

    def reprepro_setup_job(self, config):
        '''Create job that sets up reprepro.'''
        host = { 'name': 'localhost' }
        project = { 'name': 'reprepro-setup' }
        jc = self.create_job_config(host, project, 'reprepro-setup')
        jc.set_name('%(project.name)s %(host.name)s reprepro setup')
        script = '''
if %(global.pretend)s; then exit 0; fi
base="%(global.repreprobase)s"
mkdir -p "$base"
mkdir -p "$base/incoming"
mkdir -p "$base/incoming.temp"
mkdir -p "$base/conf"
cat << end > "$base/conf/uploaders"
allow * by anybody
end
'''

        archs = set()
        dists = set()
        for host in config['hosts']:
            if 'arch' in host:
                archs.add(host['arch'])
            if 'debian-dist' in host:
                dists.add(host['debian-dist'])

        script += '''
rm -f "$base/conf/distributions"
'''
        for dist in dists:
            script += '''
cat << end >> "$base/conf/distributions"
Codename: %(dist)s
Suite: %(dist)s
Origin: code.liw.fi
Description: release builds by CI system
Architectures: source %(archs)s
Components: main
Uploaders: uploaders
Tracking: keep includechanges

Codename: %(dist)s-ci
Suite: %(dist)s-ci
Origin: code.liw.fi
Description: CI builds
Architectures: source %(archs)s
Components: main
Uploaders: uploaders
Tracking: keep includechanges

end
''' % {
                'dist': dist,
                'archs': ' '.join(archs),
            }

        script += '''
cat << end > "$base/conf/incoming"
Name: default
IncomingDir: incoming
TempDir: incoming.temp
Allow: %(dists)s %(cidists)s
end
''' % {
            'dists': ' '.join(dists),
            'cidists': ' '.join('%s-ci' % dist for dist in dists),
        }

        script += '''
reprepro -b "$base" --verbose export
'''

        jc.add_shell_command(script)
        return jc

    def pbuilder_create_job(self, host):
        '''Create job to create/update pbuilder base.tgz.'''
        project = { 'name': 'pbuilder-create' }
        jc = self.create_job_config(host, project, 'pbuilder-create')
        jc.set_name('%(project.name)s %(host.name)s pbuilder-create')
        jc.set_description(
            'Create or update pbuilder base.tgz on %(host.ssh-target)s')
        jc.add_ssh_command(no_vcs_copy + prelude + '''
echo "Creating/updating pbuilder tarballs."

setup_pbuilder()
{
    local basetgz="$1"
    local releaseonly="$2"
    local dist="%(host.debian-dist)s"

    if [ -e "$basetgz" ]
    then
        if find $(dirname "$basetgz") -maxdepth 1 \
            -name $(basename "$basetgz") -mtime +%(global.pbuilder-max-age)s | grep .
        then
            opts="--update --override-config"
        else
            echo "$basetgz is pretty new, keeping it"
            sudo pbuilder --clean
            return
        fi
    else
        opts="--create"
    fi

    if %(global.pretend)s
    then
        echo "PRETEND: pbuilder create/update $basetgz"
    elif [ "$releaseonly" = yes ]
    then
        sudo pbuilder $opts \
            --basetgz "$basetgz" \
            --distribution "$dist" \
            --mirror "%(global.debian-mirror)s" \
            --othermirror \
            "deb %(global.artifacts-url)s/debian $dist main | deb http://code.liw.fi/debian ${dist} main"
        sudo pbuilder --clean
    else
        sudo pbuilder $opts \
            --basetgz "$basetgz" \
            --distribution "$dist" \
            --mirror "%(global.debian-mirror)s" \
            --othermirror \
            "deb %(global.artifacts-url)s/debian $dist main | deb %(global.artifacts-url)s/debian ${dist}-ci main"
        sudo pbuilder --clean
    fi
}

setup_pbuilder "$pbuilder_ci_tgz" no
setup_pbuilder "$pbuilder_release_tgz" yes
''')
        return jc

    def generate_project(self, config, project):
        '''Generate all the jobs for a given project.'''

        localhost = {
            'name': 'localhost',
            'arch': 'irrelevant',
        }

        main_host = config['hosts'][0]

        jobs = []
        jobs.append(self.vcs_watch_job(localhost, project))

        debian_wanted = project.get('debian', True)
        if debian_wanted is True:
            debian_hosts = config['hosts']
        elif type(debian_wanted) is list:
            debian_hosts = [
                x for x in config['hosts']
                if x['debian-dist'] in debian_wanted]
        else:
            debian_hosts = []

        for host in debian_hosts:
            jobs.append(self.prepare_worker_job(host, project))
            jobs.append(self.build_job(host, project))

        jobs.append(self.tarball_job(main_host, project))

        done = set()
        for host in debian_hosts:
            if host['debian-dist'] not in done:
                jobs.append(self.dsc_job(host, project))
                jobs.append(self.process_incoming_job(
                    config, localhost, project, 'dsc_%s' % host['name']))
                done.add(host['debian-dist'])

        build_all_dists = set()
        for host in debian_hosts:
            build_all = host['debian-dist'] not in build_all_dists
            build_all_dists.add(host['debian-dist'])
            jobs.append(self.deb_job(host, project, build_all))
            jobs.append(self.process_incoming_job(
                config, localhost, project, 'deb_%s' % host['name']))

        return [(jc.job_id(), jc.tostring()) for jc in jobs]

    def vcs_watch_job(self, host, project):
        '''Generate a job to watch a VCS repository.'''
        jc = self.create_job_config(host, project, 'vcs-update')
        jc.set_name('%(project.name)s %(host.name)s VCS update')
        jc.set_description(
            'Watch a version control repository: %(project.vcs)s')
        if 'bzr' in project:
            jc.set_bzr(project['bzr'])
        elif 'git' in project:
            jc.set_git(project['git'])
        else:
            raise cliapp.AppException('Unknown VCS')
        return jc

    def prepare_worker_job(self, host, project):
        '''Create job to prepare worker for build.

        Upgrades build environment, and configures dput.
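
        The dput configuration written by this job lets later build jobs
        upload unsigned packages to the reprepro incoming queue on the
        Jenkins host.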

        '''

        jc = self.create_job_config(host, project, 'prepare-worker')
        jc.set_description('Upgrade build environment on %(host.ssh-target)s '
                           'for %(project.name)s from %(project.vcs)s')
        jc.set_name('%(project.name)s %(host.name)s Prepare Worker')
        jc.add_ssh_command('''
cat << EOF | sudo tee /etc/apt/sources.list.d/jenkinstool.list
deb %(global.artifacts-url)s/debian %(host.debian-dist)s main
deb %(global.artifacts-url)s/debian %(host.debian-dist)s-ci main
EOF
sudo apt-get update || true
sudo apt-get --no-remove --allow-unauthenticated -y upgrade || \
    sudo apt-get -y -f install
sudo apt-get --no-remove --allow-unauthenticated -y upgrade
sudo apt-get clean

cat << EOF > /var/lib/jenkins/.dput.cf
[jenkins]
fqdn = %(global.dput-host)s
method = scp
incoming = /var/www/jenkins/debian/incoming
allow_unsigned_uploads = 1
check_version = 0
run_dinstall = 0
EOF
''')
        return jc

    def build_job(self, host, project):
        '''Generate a job to build a given project on a given host.'''
        jc = self.create_job_config(host, project, 'build')
        jc.set_description('Build %(project.name)s from %(project.vcs)s '
                           'on %(host.ssh-target)s')
        jc.set_name('%(project.name)s %(host.name)s Build')
        self.add_clear_workspace(jc)
        self.add_get_source_from_vcs(jc)
        self.add_rsync_workspace_to_target(jc)
        if not self.pretend:
            jc.add_ssh_command(have_vcs_copy + prelude + '''
missing="$(dpkg-checkbuilddeps 2>&1 | sed -n '/^.*: Unmet build dependencies: /s///p' | sed 's/([^)]*)//g' | tr -s ' ' '\n')"
case "$missing" in
    ?*)
        sudo apt-get install --no-remove --allow-unauthenticated -y $missing
        sudo apt-get clean
        ;;
esac
''')
        jc.add_ssh_command('\n'.join(project['build-commands']))
        self.add_rsync_workspace_from_target(jc)
        return jc

    def tarball_job(self, host, project):
        '''Create a job to create upstream release tarball.'''
        jc = self.create_job_config(host, project, 'tarball')
        jc.set_description('Create release tarball for %(project.name)s '
                           'from %(project.vcs)s')
        jc.set_name('%(project.name)s %(host.name)s Tarball')
        self.add_clear_workspace(jc)
        self.add_get_source_from_vcs(jc)
        self.add_rsync_workspace_to_target(jc)
        jc.add_ssh_command(have_vcs_copy + prelude + '''
if ! url_exists "$tarball_url"
then
    tempdir="$(mktemp -d)"
    dirname="$tempdir/${project}-${version}"
    if [ -e .bzr ]
    then
        bzr export "$dirname"
    elif [ -e .git ]
    then
        git archive --prefix="${project}-${version}/" HEAD | tar -C "$tempdir" -xf -
    else
        echo "Unknown VCS" 1>&2
        exit 1
    fi
    find . -delete
    tar -C "$tempdir" -caf "$tarball" --anchored \
        --exclude "${project}-${version}/debian" \
        "${project}-${version}"
else
    echo "$artifacts/$tarball already exists, ignoring rebuilt version"
fi
''')
        self.add_rsync_workspace_from_target(jc)
        jc.add_shell_command(have_vcs_copy + prelude + '''
mkdir -p "$artifacts"
for x in *
do
    if [ ! -e "$artifacts/$x" ]
    then
        mv "$x" "$artifacts/."
    fi
done
''')
        return jc

    def dsc_job(self, host, project):
        '''Create a job to create Debian source package.'''
        jc = self.create_job_config(host, project, 'dsc')
        jc.set_description('Create Debian source package for %(project.name)s '
                           'from %(project.vcs)s on %(host.ssh-target)s')
        jc.set_name('%(project.name)s %(host.name)s dsc')
        self.add_clear_workspace(jc)
        self.add_get_source_from_vcs(jc)
        self.add_rsync_workspace_to_target(jc)
        jc.add_ssh_command(have_vcs_copy + prelude + '''\
if url_exists "$dsc_url"
then
    echo "$dsc_url already exists, not rebuilding"
    exit 0
fi

if url_exists "$debian_dsc_url"
then
    echo "$debian_dsc_url already exists, not rebuilding"
    exit 0
fi

wget -O "../$origtgz" "$project_url/$tarball"

# Should we include the .orig.tar.gz in the upload?
if ! is_release "$project" "$version"
then
    # It's not a release. dpkg-genchanges will do the right thing, so
    # we let it.
    srcopt=-si
elif [ "$debian_dist" = unstable ]
then
    # It's a release, and it's going to Debian unstable. dpkg-genchanges
    # will do the right thing.
    srcopt=-si
else
    # It's a release, and it's going to code.liw.fi, as a backport-like
    # build. We tell dpkg-genchanges to include the source if the Debian
    # version is -1.* and otherwise not.
    case "$debianversion" in
        *-1.*) srcopt=-sa ;;
        *) srcopt=-sd ;;
    esac
fi

# Should we modify debian/changelog? We do not do that if it is a
# release build, going to Debian unstable.
if ! is_release "$project" "$version"
then
    dch -b -v "$debianversion" --distribution "$debian_dist" "CI build"
    dch -r ''
elif [ "$debian_dist" != unstable ]
then
    dch -v "$debianversion" --distribution "$debian_dist" \
        "Release backport build"
    dch -r ''
fi

debuild -S $srcopt -us -uc -i
dput -f jenkins "../$changes"
''')
        self.add_rsync_workspace_from_target(jc)
        return jc

    def process_incoming_job(self, config, localhost, project, suffix):
        '''Create job that processes reprepro's incoming queue.'''
        jc = self.create_job_config(localhost, project, 'incoming_' + suffix)
        jc.set_name('%(project.name)s %(host.name)s incoming ' + suffix)
        jc.set_description('Process reprepro incoming queue')
        jc.add_shell_command(
            'reprepro -b "%(global.repreprobase)s" --verbose '
            'processincoming default')
        return jc

    def deb_job(self, host, project, build_all):
        '''Create a job to build a Debian binary package on a given host.'''
        jc = self.create_job_config(host, project, 'deb')
        jc.set_description('Build Debian binary package for %(project.name)s '
                           'from %(project.vcs)s on %(host.ssh-target)s')
        jc.set_name('%(project.name)s %(host.name)s deb')
        self.add_clear_workspace(jc)
        self.add_get_source_from_vcs(jc)
        self.add_rsync_workspace_to_target(jc)
        script = have_vcs_copy + prelude
        if build_all:
            script += '''
if grep '^Architecture:.*any' debian/control > /dev/null
then
    url="$poolurl/$archany_changes"
else
    url="$poolurl/$archall_changes"
fi

if url_exists "$url"
then
    echo "$url exists, not rebuilding"
    exit 0
fi

# We can just check for .dsc on Debian mirrors: if the source package
# exists, the .deb will also exist. This saves us from having to
# figure out the binary package names.
if url_exists "$debian_dsc_url"
then
    echo "$debian_dsc_url exists, not rebuilding"
    exit 0
fi

binopt=-b
'''
        else:
            script += '''
if ! grep '^Architecture:.*any' debian/control > /dev/null
then
    echo No architecture specific packages to build, skipping.
    exit 0
fi

if url_exists "$poolurl/$archany_changes"
then
    echo "$poolurl/$archany_changes exists, not rebuilding"
    exit 0
fi

# We can just check for .dsc on Debian mirrors: if the source package
# exists, the .deb will also exist. This saves us from having to
# figure out the binary package names.
if url_exists "$debian_dsc_url"
then
    echo "$debian_dsc_url exists, not rebuilding"
    exit 0
fi

binopt=-B
'''

        jc.add_ssh_command(script + '''
curdir="$(pwd)"
temp="$(mktemp -d)"
cd "$temp"
dget -u "$dsc_url"
mkdir result
if man pbuilder | grep -e --allow-untrusted
then
    untrusted="--allow-untrusted"
else
    untrusted=""
fi
sudo pbuilder --update --basetgz "$basetgz"
sudo pbuilder --build --basetgz "$basetgz" --buildresult result $untrusted \
    --debbuildopts $binopt *.dsc
dput jenkins "result/$archany_changes"
cd "$curdir"
rm -rf "$temp"
''')
        self.add_rsync_workspace_from_target(jc)
        return jc

    def create_job_config(self, host, project, suffix):
        '''Create a job configuration with project, host, global parameters.'''
        jc = simplejenkinsapi.JobConfig()
        jc.set_job_suffix(suffix)
        if 'bzr' in project:
            project['vcs'] = project['bzr']
        elif 'git' in project:
            project['vcs'] = project['git']
        jc.add_param_dict('project', project)
        jc.add_param_dict('host', host)
        jc.add_param_dict('global', {
            'artifacts': '/var/www/jenkins',
            'artifacts-url': self.artifacts_url,
            'dput-host': self.jenkins_host,
            'repreprobase': '/var/www/jenkins/debian',
            'debian-mirror': self.debian_mirror,
            'pretend': 'true' if self.pretend else 'false',
            'pbuilder-max-age': str(self.pbuilder_max_age),
        })
        return jc

    def add_rsync_workspace_to_target(self, jc):
        jc.add_ssh_command('mkdir -p "%(host.directory)s"')
        jc.add_shell_command(
            'rsync -aHS --delete-before . '
            '"%(host.ssh-target)s:%(host.directory)s/."')

    def add_rsync_workspace_from_target(self, jc):
        jc.add_shell_command(
            'rsync -aHS --delete-before '
            '"%(host.ssh-target)s:%(host.directory)s/." .')

    def add_clear_workspace(self, jc):
        jc.add_shell_command('find . -delete')

    def add_get_source_from_vcs(self, jc):
        jc.add_shell_command(
            no_vcs_copy + prelude + 'cp -a "$vcsworkspace/." .')


class JenkinsTool(cliapp.Application):

    '''Manipulate jobs in a running Jenkins CI server.'''

    def add_settings(self):
        self.settings.boolean(
            ['pretend-jobs'],
            'make generate-jobs create jobs that merely pretend to do things')
        self.settings.integer(
            ['pbuilder-max-age'],
            'update pbuilder base.tgz if older than DAYS',
            metavar='DAYS',
            default=7)
        self.settings.string(
            ['jenkins-host'],
            'address of host running jenkins')
        self.settings.string(
            ['mirror'],
            'what Debian mirror to use',
            default='http://cdn.debian.net/debian')

    @property
    def jenkins_url(self):
        return 'http://%s:8080/' % self.settings['jenkins-host']

    @property
    def artifacts_url(self):
        return 'http://%s/jenkins' % self.settings['jenkins-host']

    def cmd_list_jobs(self, args):
        '''List all jobs on the server.'''
        jenkins = simplejenkinsapi.Jenkins(self.jenkins_url)
        for job_id in jenkins.list_jobs():
            self.output.write('%s\n' % job_id)

    def cmd_delete_job(self, args):
        '''Remove specified jobs from server.'''
        jenkins = simplejenkinsapi.Jenkins(self.jenkins_url)
        for job_id in args:
            jenkins.delete_job(job_id)

    def cmd_delete_all_jobs(self, args):
        '''Remove all jobs from server.'''
        jenkins = simplejenkinsapi.Jenkins(self.jenkins_url)
        for job_id in jenkins.list_jobs():
            jenkins.delete_job(job_id)

    def cmd_create_job(self, args):
        '''Create a job on the server.'''
        if len(args) != 2:
            raise cliapp.AppException(
                'Need JOB-ID and CONFIG-FILENAME arguments')
        job_id, config_xml_filename = args
        with open(config_xml_filename) as f:
            jenkins = simplejenkinsapi.Jenkins(self.jenkins_url)
            jenkins.create_job(job_id, f.read())

    def cmd_update_job(self, args):
        '''Update a job's config on the server.'''
        if len(args) != 2:
            raise cliapp.AppException(
                'Need JOB-ID and CONFIG-FILENAME arguments')
        job_id, config_xml_filename = args
        with open(config_xml_filename) as f:
            jenkins = simplejenkinsapi.Jenkins(self.jenkins_url)
            jenkins.update_job(job_id, f.read())

    def cmd_graph_projects(self, args):
        '''Output a graphviz file showing project dependencies.'''

        def find(config, name):
            for project in config['projects']:
                if project['name'] == name:
                    return project
            return None

        def write_graphviz(f, config, project_names):
            done = set()
            f.write('strict digraph foo {\n')
            while project_names:
                name = project_names.pop()
                if name not in done:
                    done.add(name)
                    project = find(config, name)
                    if project:
                        for dep in project.get('build-depends', []):
                            f.write('"%s" -> "%s";\n' % (name, dep))
                            project_names.append(dep)
            f.write('}\n')

        if len(args) == 0:
            return
        filename = args[0]
        with open(filename) as f:
            config = json.load(f)
        if len(args) > 1:
            project_names = args[1:]
        else:
            project_names = [p['name'] for p in config['projects']]
        write_graphviz(self.output, config, project_names)

    def cmd_run_jobs(self, args):
        '''Update Jenkins with jobs based on a specification, then run them.'''

        self.settings.require('jenkins-host')

        if len(args) == 0:
            return
        filename = args[0]
        project_names = set(args[1:])

        jenkins = simplejenkinsapi.Jenkins(self.jenkins_url)
        job_generator = JobGenerator(self.settings['pretend-jobs'],
                                     self.settings['pbuilder-max-age'],
                                     self.artifacts_url,
                                     self.settings['jenkins-host'],
                                     self.settings['mirror'])

        with open(filename) as f:
            try:
                config = json.load(f)
            except ValueError, e:
                raise cliapp.AppException('%s: %s' % (filename, str(e)))

        def make_it_so(job_id, config_xml):
            if job_id in jenkins.list_jobs():
                jenkins.update_job(job_id, config_xml)
            else:
                jenkins.create_job(job_id, config_xml)

        job_ids = []
        all_jobs = []
        for job_id, config_xml in job_generator.generate_setup_jobs(config):
            make_it_so(job_id, config_xml)
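            # Setup jobs are always created or updated, but they are only
            # queued for running when no specific projects were requested.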
            all_jobs.append(job_id)
            if not project_names:
                job_ids.append(job_id)

        found_projects = set()
        for project in simplejenkinsapi.order(config['projects']):
            pairs = job_generator.generate_project(config, project)
            for job_id, config_xml in pairs:
                make_it_so(job_id, config_xml)
                all_jobs.append(job_id)
                if not project_names or project['name'] in project_names:
                    job_ids.append(job_id)
            found_projects.add(project['name'])

        if project_names:
            not_found = project_names.difference(found_projects)
            if not_found:
                raise cliapp.AppException(
                    'Could not find projects %s' % ' '.join(not_found))

        for job_id in jenkins.list_jobs():
            if job_id not in all_jobs:
                jenkins.delete_job(job_id)

        ts = ttystatus.TerminalStatus(period=0)
        ts.format('Running job %Index(job,jobs) %String(job)')
        ts['jobs'] = job_ids
        for job_id in job_ids:
            logging.info('Starting job %s' % job_id)
            ts['job'] = job_id
            prev = latest = jenkins.get_latest_build_number(job_id)
            jenkins.run_job(job_id)
            while latest == prev:
                time.sleep(1)
                latest = jenkins.get_latest_build_number(job_id)
            while True:
                time.sleep(1)
                info = jenkins.get_build_info(job_id, latest)
                if info['result'] is not None:
                    break
            logging.info('Finished job %s' % job_id)
            if info['result'] != 'SUCCESS':
                ts.clear()
                ts.finish()
                url = 'http://%s:8080/job/%s/%s/console' % \
                    (self.settings['jenkins-host'], job_id, latest)
                raise cliapp.AppException('Job %s failed: %s' % (job_id, url))
        ts.finish()


JenkinsTool(version=simplejenkinsapi.__version__).run()
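

# Example invocation, as a sketch only: the host name and file name below
# are hypothetical, and the subcommand name assumes the usual cliapp
# convention of deriving it from the cmd_run_jobs method name.
#
#   jenkinstool --jenkins-host=ci.example.com run-jobs projects.json
#
# The specification file is JSON; based on the keys this program reads,
# it looks roughly like this (all values are made up for illustration):
#
#   {
#       "hosts": [
#           {
#               "name": "builder1",
#               "arch": "amd64",
#               "debian-dist": "unstable",
#               "ssh-target": "jenkins@builder1",
#               "directory": "/home/jenkins/build"
#           }
#       ],
#       "projects": [
#           {
#               "name": "hello",
#               "git": "git://example.com/hello.git",
#               "build-commands": ["make", "make check"]
#           }
#       ]
#   }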