X-Git-Url: https://git.arvados.org/arvados-dev.git/blobdiff_plain/0be695eef3af4788a0890060a765e6b0d2c32334..1ff88838f9ca6a912411fb6bf3df217e1bcdcbe9:/jenkins/run_upload_packages.py

diff --git a/jenkins/run_upload_packages.py b/jenkins/run_upload_packages.py
index 5fc134c..0ef609e 100755
--- a/jenkins/run_upload_packages.py
+++ b/jenkins/run_upload_packages.py
@@ -1,16 +1,52 @@
 #!/usr/bin/env python3
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
 import argparse
 import functools
 import glob
+import locale
 import logging
 import os
 import pipes
+import re
 import shutil
 import subprocess
 import sys
 import time


+def run_and_grep(cmd, read_output, *regexps,
+                 encoding=locale.getpreferredencoding(), **popen_kwargs):
+    """Run a subprocess and capture output lines matching regexps.
+
+    Arguments:
+    * cmd: The command to run, as a list or string, as for subprocess.Popen.
+    * read_output: 'stdout' or 'stderr', the name of the output stream to read.
+    Remaining arguments are regexps to match output, as strings or compiled
+    regexp objects.  Output lines matching any regexp will be captured.
+
+    Keyword arguments:
+    * encoding: The encoding used to decode the subprocess output.
+    Remaining keyword arguments are passed directly to subprocess.Popen.
+
+    Returns 2-tuple (subprocess returncode, list of matched output lines).
+    """
+    regexps = [regexp if hasattr(regexp, 'search') else re.compile(regexp)
+               for regexp in regexps]
+    popen_kwargs[read_output] = subprocess.PIPE
+    proc = subprocess.Popen(cmd, **popen_kwargs)
+    with open(getattr(proc, read_output).fileno(), encoding=encoding) as output:
+        matched_lines = []
+        for line in output:
+            if any(regexp.search(line) for regexp in regexps):
+                matched_lines.append(line)
+            if read_output == 'stderr':
+                print(line, file=sys.stderr, end='')
+    return proc.wait(), matched_lines
+
+
 class TimestampFile:
     def __init__(self, path):
         self.path = path
@@ -62,6 +98,14 @@ class PackageSuite:

 class PythonPackageSuite(PackageSuite):
     LOGGER_PART = 'python'
+    REUPLOAD_REGEXPS = [
+        re.compile(
+            r'^error: Upload failed \(400\): A file named "[^"]+" already exists\b'),
+        re.compile(
+            r'^error: Upload failed \(400\): File already exists\b'),
+        re.compile(
+            r'^error: Upload failed \(400\): Only one sdist may be uploaded per release\b'),
+    ]

     def __init__(self, glob_root, rel_globs):
         super().__init__(glob_root, rel_globs)
@@ -85,24 +129,22 @@ class PythonPackageSuite(PackageSuite):
         if not self.logger.isEnabledFor(logging.INFO):
             cmd.append('--quiet')
         cmd.extend(['sdist', '--dist-dir', '.upload_dist', 'upload'])
-        subprocess.check_call(cmd, cwd=src_dir)
+        upload_returncode, repushed = run_and_grep(
+            cmd, 'stderr', *self.REUPLOAD_REGEXPS, cwd=src_dir)
+        if (upload_returncode != 0) and not repushed:
+            raise subprocess.CalledProcessError(upload_returncode, cmd)
         shutil.rmtree(os.path.join(src_dir, '.upload_dist'))


 class GemPackageSuite(PackageSuite):
     LOGGER_PART = 'gems'
+    REUPLOAD_REGEXP = re.compile(r'^Repushing of gem versions is not allowed\.$')

     def upload_file(self, path):
         cmd = ['gem', 'push', path]
-        push_proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
-        repushed = any(line == b'Repushing of gem versions is not allowed.\n'
-                       for line in push_proc.stdout)
-        # Read any remaining stdout before closing.
-        for line in push_proc.stdout:
-            pass
-        push_proc.stdout.close()
-        if (push_proc.wait() != 0) and not repushed:
-            raise subprocess.CalledProcessError(push_proc.returncode, cmd)
+        push_returncode, repushed = run_and_grep(cmd, 'stdout', self.REUPLOAD_REGEXP)
+        if (push_returncode != 0) and not repushed:
+            raise subprocess.CalledProcessError(push_returncode, cmd)


 class DistroPackageSuite(PackageSuite):
@@ -136,8 +178,11 @@ class DistroPackageSuite(PackageSuite):
                 self.__class__.__name__, *(pipes.quote(s) for s in args)))

     def upload_files(self, paths):
+        dest_dir = os.path.join(self.REMOTE_DEST_DIR, self.target)
+        mkdir = self._build_cmd('ssh', self.ssh_host, 'install', '-d', dest_dir)
+        subprocess.check_call(mkdir)
         cmd = self._build_cmd('scp', *paths)
-        cmd.append('{self.ssh_host}:{self.REMOTE_DEST_DIR}'.format(self=self))
+        cmd.append('{}:{}'.format(self.ssh_host, dest_dir))
         subprocess.check_call(cmd)


@@ -150,14 +195,16 @@ freight cache "apt/$DISTNAME"
 rm "$@"
 """
     TARGET_DISTNAMES = {
-        'debian7': 'wheezy',
-        'debian8': 'jessie',
-        'ubuntu1204': 'precise',
-        'ubuntu1404': 'trusty',
+        'debian8': 'jessie-dev',
+        'debian9': 'stretch-dev',
+        'debian10': 'buster-dev',
+        'ubuntu1404': 'trusty-dev',
+        'ubuntu1604': 'xenial-dev',
+        'ubuntu1804': 'bionic-dev',
         }

     def post_uploads(self, paths):
-        self._run_script(self.FREIGHT_SCRIPT, self.REMOTE_DEST_DIR,
+        self._run_script(self.FREIGHT_SCRIPT, self.REMOTE_DEST_DIR + '/' + self.target,
                          self.TARGET_DISTNAMES[self.target],
                          *self._paths_basenames(paths))

@@ -172,13 +219,13 @@ createrepo "$REPODIR"
 """
     REPO_ROOT = '/var/www/rpm.arvados.org/'
     TARGET_REPODIRS = {
-        'centos6': 'CentOS/6/os/x86_64/'
+        'centos7': 'CentOS/7/dev/x86_64/',
         }

     def post_uploads(self, paths):
         repo_dir = os.path.join(self.REPO_ROOT,
                                 self.TARGET_REPODIRS[self.target])
-        self._run_script(self.CREATEREPO_SCRIPT, self.REMOTE_DEST_DIR,
+        self._run_script(self.CREATEREPO_SCRIPT, self.REMOTE_DEST_DIR + '/' + self.target,
                          repo_dir, *self._paths_basenames(paths))


@@ -189,6 +236,7 @@ PACKAGE_SUITES = {
     'python': _define_suite(PythonPackageSuite,
                             'sdk/pam/dist/*.tar.gz',
                             'sdk/python/dist/*.tar.gz',
+                            'sdk/cwl/dist/*.tar.gz',
                             'services/nodemanager/dist/*.tar.gz',
                             'services/fuse/dist/*.tar.gz',
         ),
@@ -198,11 +246,11 @@ PACKAGE_SUITES = {
                           'services/login-sync/*.gem',
         ),
     }
-for target in ['debian7', 'debian8', 'ubuntu1204', 'ubuntu1404']:
+for target in ['debian8', 'debian9', 'debian10', 'ubuntu1404', 'ubuntu1604', 'ubuntu1804']:
     PACKAGE_SUITES[target] = _define_suite(
         DebianPackageSuite, os.path.join('packages', target, '*.deb'),
         target=target)
-for target in ['centos6']:
+for target in ['centos7']:
     PACKAGE_SUITES[target] = _define_suite(
         RedHatPackageSuite, os.path.join('packages', target, '*.rpm'),
         target=target)
@@ -261,12 +309,12 @@ def build_suite_and_upload(target, since_timestamp, args):
 def main(arguments, stdout=sys.stdout, stderr=sys.stderr):
     args = parse_arguments(arguments)
     setup_logger(stderr, args)
-    ts_file = TimestampFile(os.path.join(args.workspace, 'packages',
-                                         '.last_upload'))
-    last_upload_ts = ts_file.last_upload()
     for target in args.targets:
+        ts_file = TimestampFile(os.path.join(args.workspace, 'packages',
+                                             '.last_upload_%s' % target))
+        last_upload_ts = ts_file.last_upload()
         build_suite_and_upload(target, last_upload_ts, args)
-    ts_file.update()
+        ts_file.update()

 if __name__ == '__main__':
     main(sys.argv[1:])
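
For reference, a minimal sketch (not part of the diff) of how the run_and_grep() helper introduced above is meant to be called. The command and pattern below are hypothetical placeholders; only the returncode/matched-lines handling mirrors what the upload suites do:

    import subprocess

    # Hypothetical caller: a nonzero exit is fatal only if stderr never
    # mentioned "already exists" (i.e. the failure was not a benign re-upload).
    cmd = ['twine', 'upload', 'dist/example-1.0.tar.gz']  # placeholder command
    returncode, matched = run_and_grep(cmd, 'stderr', r'already exists')
    if returncode != 0 and not matched:
        raise subprocess.CalledProcessError(returncode, cmd)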