import locale
import logging
import os
-import pipes
import re
+import shlex
import shutil
import subprocess
import sys
try:
os.makedirs(p)
except OSError as exc:
- if exc.errno == errno.EEXIST and os.path.isdir(path):
+ if exc.errno == errno.EEXIST and os.path.isdir(p):
pass
else:
raise
return -1
def update(self):
- os.close(os.open(self.path, os.O_CREAT | os.O_APPEND))
- os.utime(self.path, (time.time(), self.start_time))
-
+ try:
+ os.close(os.open(self.path, os.O_CREAT | os.O_APPEND))
+ os.utime(self.path, (time.time(), self.start_time))
+ except OSError:
+ # When the packages directory is created/populated by a build in a
+ # docker container, as root, the script that runs the upload
+ # doesn't always have permission to touch a timestamp file there.
+ # In production, we build/upload from ephemeral machines, which
+ # means that the timestamp mechanism is not used. We print a
+ # warning and move on without erroring out.
+ # Catch only OSError (e.g., PermissionError): a bare `except:` would
+ # also swallow KeyboardInterrupt/SystemExit.
+ print("Warning: unable to update timestamp file", self.path, "permission problem?")
class PackageSuite:
NEED_SSH = False
class PythonPackageSuite(PackageSuite):
LOGGER_PART = 'python'
- REUPLOAD_REGEXPS = [
- re.compile(
- r'^error: Upload failed \(400\): A file named "[^"]+" already exists\b'),
- re.compile(
- r'^error: Upload failed \(400\): File already exists\b'),
- re.compile(
- r'^error: Upload failed \(400\): Only one sdist may be uploaded per release\b'),
- ]
-
- def __init__(self, glob_root, rel_globs):
- super().__init__(glob_root, rel_globs)
- self.seen_packages = set()
def upload_file(self, path):
- src_dir = os.path.dirname(os.path.dirname(path))
- if src_dir in self.seen_packages:
- return
- self.seen_packages.add(src_dir)
- # We also must run `sdist` before `upload`: `upload` uploads any
- # distributions previously generated in the command. It doesn't
- # know how to upload distributions already on disk. We write the
- # result to a dedicated directory to avoid interfering with our
- # timestamp tracking.
- cmd = ['python3', 'setup.py']
- if not self.logger.isEnabledFor(logging.INFO):
- cmd.append('--quiet')
- cmd.extend(['bdist_wheel', '--dist-dir', '.upload_dist'])
- cmd.extend(['sdist', '--dist-dir', '.upload_dist'])
- cmd.extend(['upload'])
- upload_returncode, repushed = run_and_grep(
- cmd, 'stderr', *self.REUPLOAD_REGEXPS, cwd=src_dir)
- if (upload_returncode != 0) and not repushed:
- raise subprocess.CalledProcessError(upload_returncode, cmd)
- shutil.rmtree(os.path.join(src_dir, '.upload_dist'))
+ # Upload an already-built distribution file directly with twine.
+ # `--skip-existing` makes re-uploading the same file a no-op instead of
+ # a 400 error, so the old REUPLOAD_REGEXPS error-grepping and the
+ # seen_packages dedup set are no longer needed. `check=True` raises
+ # CalledProcessError on any other twine failure.
+ subprocess.run([
+ 'twine', 'upload',
+ '--disable-progress-bar',
+ '--non-interactive',
+ '--skip-existing',
+ path,
+ ], stdin=subprocess.DEVNULL, check=True)
class GemPackageSuite(PackageSuite):
# self.__class__.__name__ provides $0 for the script, which makes a
# nicer message if there's an error.
subprocess.check_call(self._build_cmd(
- 'ssh', self.ssh_host, 'bash', '-ec', pipes.quote(script),
- self.__class__.__name__, *(pipes.quote(s) for s in args)))
+ 'ssh', self.ssh_host, 'bash', '-ec', shlex.quote(script),
+ self.__class__.__name__, *(shlex.quote(s) for s in args)))
def upload_files(self, paths):
dest_dir = os.path.join(self.REMOTE_DEST_DIR, self.target)
class DebianPackageSuite(DistroPackageSuite):
APT_SCRIPT = """
+set -e
cd "$1"; shift
DISTNAME=$1; shift
+# aptly implements its own locking, but its wait strategy as of April 2024 is
+# not patient enough to accommodate multiple simultaneous uploads.
+APTLY_LOCK="${XDG_RUNTIME_DIR:-/tmp}/aptly-upload.lock"
+aptly() {
+ flock --wait=300 "$APTLY_LOCK" aptly "$@"
+}
for package in "$@"; do
- set +e
- aptly repo search "$DISTNAME" "${package%.deb}" >/dev/null 2>&1
- RET=$?
- set -e
- if [[ $RET -eq 0 ]]; then
+ if aptly repo search "$DISTNAME" "${package%.deb}" >/dev/null 2>&1; then
echo "Not adding $package, it is already present in repo $DISTNAME"
rm "$package"
else
def __init__(self, glob_root, rel_globs, target, ssh_host, ssh_opts, repo):
super().__init__(glob_root, rel_globs, target, ssh_host, ssh_opts)
+ # Maps each supported build target to its aptly distribution name
+ # (<distro codename>-<repo>); targets dropped here are also dropped
+ # from the target loop that registers PACKAGE_SUITES.
self.TARGET_DISTNAMES = {
- 'debian8': 'jessie-'+repo,
- 'debian9': 'stretch-'+repo,
'debian10': 'buster-'+repo,
- 'ubuntu1404': 'trusty-'+repo,
- 'ubuntu1604': 'xenial-'+repo,
+ 'debian11': 'bullseye-'+repo,
+ 'debian12': 'bookworm-'+repo,
'ubuntu1804': 'bionic-'+repo,
'ubuntu2004': 'focal-'+repo,
+ 'ubuntu2204': 'jammy-'+repo,
}
def post_uploads(self, paths):
REPODIR=$1; shift
rpmsign --addsign "$@" </dev/null
mv "$@" "$REPODIR"
-createrepo "$REPODIR"
+createrepo_c -c ~/.createrepo-cache --update "$REPODIR"
"""
REPO_ROOT = '/var/www/rpm.arvados.org/'
super().__init__(glob_root, rel_globs, target, ssh_host, ssh_opts)
self.TARGET_REPODIRS = {
'centos7': 'CentOS/7/%s/x86_64/' % repo,
+ 'rocky8': 'CentOS/8/%s/x86_64/' % repo,
}
def post_uploads(self, paths):
PACKAGE_SUITES = {
'python': _define_suite(PythonPackageSuite,
- 'sdk/python/dist/*.tar.gz',
'sdk/cwl/dist/*.tar.gz',
+ 'sdk/cwl/dist/*.whl',
+ 'sdk/python/dist/*.tar.gz',
+ 'sdk/python/dist/*.whl',
'services/fuse/dist/*.tar.gz',
+ 'services/fuse/dist/*.whl',
+ 'tools/crunchstat-summary/dist/*.tar.gz',
+ 'tools/crunchstat-summary/dist/*.whl',
),
'gems': _define_suite(GemPackageSuite,
'sdk/ruby/*.gem',
if args.workspace is None:
parser.error("workspace not set from command line or environment")
- for target in ['debian8', 'debian9', 'debian10', 'ubuntu1404', 'ubuntu1604', 'ubuntu1804', 'ubuntu2004']:
+ for target in [
+ 'debian10', 'debian11', 'debian12',
+ 'ubuntu1804', 'ubuntu2004', 'ubuntu2204',
+ ]:
PACKAGE_SUITES[target] = _define_suite(
DebianPackageSuite, os.path.join('packages', target, '*.deb'),
target=target, repo=args.repo)
- for target in ['centos7']:
+ for target in ['centos7', 'rocky8']:
PACKAGE_SUITES[target] = _define_suite(
RedHatPackageSuite, os.path.join('packages', target, '*.rpm'),
target=target, repo=args.repo)