#!/usr/bin/env python3
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
import argparse
+import errno
import functools
import glob
import locale
popen_kwargs[read_output] = subprocess.PIPE
proc = subprocess.Popen(cmd, **popen_kwargs)
with open(getattr(proc, read_output).fileno(), encoding=encoding) as output:
- matched_lines = [line for line in output
- if any(regexp.search(line) for regexp in regexps)]
+ matched_lines = []
+ for line in output:
+ if any(regexp.search(line) for regexp in regexps):
+ matched_lines.append(line)
+ if read_output == 'stderr':
+ print(line, file=sys.stderr, end='')
return proc.wait(), matched_lines
class TimestampFile:
def __init__(self, path):
self.path = path
+ # Make sure the dirname for `path` exists
+ p = os.path.dirname(path)
+ try:
+ os.makedirs(p)
+ except OSError as exc:
+ if exc.errno == errno.EEXIST and os.path.isdir(p):
+ pass
+ else:
+ raise
self.start_time = time.time()
def last_upload(self):
return -1
def update(self):
- os.close(os.open(self.path, os.O_CREAT | os.O_APPEND))
- os.utime(self.path, (time.time(), self.start_time))
-
+ try:
+ os.close(os.open(self.path, os.O_CREAT | os.O_APPEND))
+ os.utime(self.path, (time.time(), self.start_time))
+        except OSError:
+ # when the packages directory is created/populated by a build in a
+ # docker container, as root, the script that runs the upload
+ # doesn't always have permission to touch a timestamp file there.
+ # In production, we build/upload from ephemeral machines, which
+ # means that the timestamp mechanism is not used. We print a
+ # warning and move on without erroring out.
+            print("Warning: unable to update timestamp file", self.path, "permission problem?")
+ pass
class PackageSuite:
NEED_SSH = False
class PythonPackageSuite(PackageSuite):
LOGGER_PART = 'python'
- REUPLOAD_REGEXP = re.compile(
- r'^error: Upload failed \(400\): A file named "[^"]+" already exists\b')
+ REUPLOAD_REGEXPS = [
+ re.compile(
+ r'^error: Upload failed \(400\): A file named "[^"]+" already exists\b'),
+ re.compile(
+ r'^error: Upload failed \(400\): File already exists\b'),
+ re.compile(
+ r'^error: Upload failed \(400\): Only one sdist may be uploaded per release\b'),
+ ]
def __init__(self, glob_root, rel_globs):
super().__init__(glob_root, rel_globs)
if src_dir in self.seen_packages:
return
self.seen_packages.add(src_dir)
- # NOTE: If we ever start uploading Python 3 packages, we'll need to
- # figure out some way to adapt cmd to match. It might be easiest
- # to give all our setup.py files the executable bit, and run that
- # directly.
# We also must run `sdist` before `upload`: `upload` uploads any
# distributions previously generated in the command. It doesn't
# know how to upload distributions already on disk. We write the
# result to a dedicated directory to avoid interfering with our
# timestamp tracking.
- cmd = ['python2.7', 'setup.py']
+ cmd = ['python3', 'setup.py']
if not self.logger.isEnabledFor(logging.INFO):
cmd.append('--quiet')
- cmd.extend(['sdist', '--dist-dir', '.upload_dist', 'upload'])
+ cmd.extend(['bdist_wheel', '--dist-dir', '.upload_dist'])
+ cmd.extend(['sdist', '--dist-dir', '.upload_dist'])
+ cmd.extend(['upload'])
upload_returncode, repushed = run_and_grep(
- cmd, 'stderr', self.REUPLOAD_REGEXP, cwd=src_dir)
+ cmd, 'stderr', *self.REUPLOAD_REGEXPS, cwd=src_dir)
if (upload_returncode != 0) and not repushed:
raise subprocess.CalledProcessError(upload_returncode, cmd)
shutil.rmtree(os.path.join(src_dir, '.upload_dist'))
class DebianPackageSuite(DistroPackageSuite):
- FREIGHT_SCRIPT = """
+ APT_SCRIPT = """
cd "$1"; shift
DISTNAME=$1; shift
-freight add "$@" "apt/$DISTNAME"
-freight cache "apt/$DISTNAME"
-rm "$@"
+for package in "$@"; do
+ set +e
+ aptly repo search "$DISTNAME" "${package%.deb}" >/dev/null 2>&1
+ RET=$?
+ set -e
+ if [[ $RET -eq 0 ]]; then
+ echo "Not adding $package, it is already present in repo $DISTNAME"
+ rm "$package"
+ else
+ aptly repo add -remove-files "$DISTNAME" "$package"
+ fi
+done
+aptly publish update "$DISTNAME" filesystem:"${DISTNAME%-*}":
"""
- TARGET_DISTNAMES = {
- 'debian7': 'wheezy',
- 'debian8': 'jessie',
- 'ubuntu1204': 'precise',
- 'ubuntu1404': 'trusty',
- 'ubuntu1604': 'xenial',
- }
+
+ def __init__(self, glob_root, rel_globs, target, ssh_host, ssh_opts, repo):
+ super().__init__(glob_root, rel_globs, target, ssh_host, ssh_opts)
+ self.TARGET_DISTNAMES = {
+ 'debian10': 'buster-'+repo,
+ 'debian11': 'bullseye-'+repo,
+ 'debian12': 'bookworm-'+repo,
+ 'ubuntu1804': 'bionic-'+repo,
+ 'ubuntu2004': 'focal-'+repo,
+ 'ubuntu2204': 'jammy-'+repo,
+ }
def post_uploads(self, paths):
- self._run_script(self.FREIGHT_SCRIPT, self.REMOTE_DEST_DIR + '/' + self.target,
+ self._run_script(self.APT_SCRIPT, self.REMOTE_DEST_DIR + '/' + self.target,
self.TARGET_DISTNAMES[self.target],
*self._paths_basenames(paths))
REPODIR=$1; shift
rpmsign --addsign "$@" </dev/null
mv "$@" "$REPODIR"
-createrepo "$REPODIR"
+createrepo -c ~/.createrepo-cache --update "$REPODIR"
"""
REPO_ROOT = '/var/www/rpm.arvados.org/'
- TARGET_REPODIRS = {
- 'centos6': 'CentOS/6/os/x86_64/',
- 'centos7': 'CentOS/7/os/x86_64/',
+
+ def __init__(self, glob_root, rel_globs, target, ssh_host, ssh_opts, repo):
+ super().__init__(glob_root, rel_globs, target, ssh_host, ssh_opts)
+ self.TARGET_REPODIRS = {
+ 'centos7': 'CentOS/7/%s/x86_64/' % repo,
+ 'rocky8': 'CentOS/8/%s/x86_64/' % repo,
}
def post_uploads(self, paths):
PACKAGE_SUITES = {
'python': _define_suite(PythonPackageSuite,
- 'sdk/pam/dist/*.tar.gz',
'sdk/python/dist/*.tar.gz',
'sdk/cwl/dist/*.tar.gz',
- 'services/nodemanager/dist/*.tar.gz',
'services/fuse/dist/*.tar.gz',
+ 'tools/crunchstat-summary/dist/*.tar.gz',
),
'gems': _define_suite(GemPackageSuite,
'sdk/ruby/*.gem',
'services/login-sync/*.gem',
),
}
-for target in ['debian7', 'debian8', 'ubuntu1204', 'ubuntu1404', 'ubuntu1604']:
- PACKAGE_SUITES[target] = _define_suite(
- DebianPackageSuite, os.path.join('packages', target, '*.deb'),
- target=target)
-for target in ['centos6', 'centos7']:
- PACKAGE_SUITES[target] = _define_suite(
- RedHatPackageSuite, os.path.join('packages', target, '*.rpm'),
- target=target)
def parse_arguments(arguments):
parser = argparse.ArgumentParser(
- prog="run_upload_packages.py",
description="Upload Arvados packages to various repositories")
parser.add_argument(
'--workspace', '-W', default=os.environ.get('WORKSPACE'),
metavar='OPTION', help="Pass option to `ssh -o`")
parser.add_argument('--verbose', '-v', action='count', default=0,
help="Log more information and subcommand output")
+ parser.add_argument(
+ '--repo', choices=['dev', 'testing'],
+ help="Whether to upload to dev (nightly) or testing (release candidate) repository")
+
parser.add_argument(
'targets', nargs='*', default=['all'], metavar='target',
help="Upload packages to these targets (default all)\nAvailable targets: " +
if args.workspace is None:
parser.error("workspace not set from command line or environment")
+
+ for target in [
+ 'debian10', 'debian11', 'debian12',
+ 'ubuntu1804', 'ubuntu2004', 'ubuntu2204',
+ ]:
+ PACKAGE_SUITES[target] = _define_suite(
+ DebianPackageSuite, os.path.join('packages', target, '*.deb'),
+ target=target, repo=args.repo)
+ for target in ['centos7', 'rocky8']:
+ PACKAGE_SUITES[target] = _define_suite(
+ RedHatPackageSuite, os.path.join('packages', target, '*.rpm'),
+ target=target, repo=args.repo)
+
for target in args.targets:
try:
suite_class = PACKAGE_SUITES[target].func
def main(arguments, stdout=sys.stdout, stderr=sys.stderr):
args = parse_arguments(arguments)
setup_logger(stderr, args)
+
for target in args.targets:
ts_file = TimestampFile(os.path.join(args.workspace, 'packages',
'.last_upload_%s' % target))