Add support for Ubuntu 18.04 (aka 'bionic') to run_upload_packages.py
[arvados-dev.git] / jenkins / run_upload_packages.py
#!/usr/bin/env python3

# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: AGPL-3.0

import argparse
import functools
import glob
import locale
import logging
import os
import pipes
import re
import shutil
import subprocess
import sys
import time

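# run_and_grep is the helper that lets the upload steps below tolerate
# "this package was already uploaded" errors: it runs a command, echoes the
# chosen stream when it is stderr, and returns any output lines matching
# the given patterns.  A minimal, purely illustrative sketch (the command
# and paths are hypothetical, not part of this script):
#
#     returncode, matches = run_and_grep(
#         ['some-upload-tool', 'dist/pkg.tar.gz'], 'stderr',
#         r'already exists', cwd='/path/to/source')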
def run_and_grep(cmd, read_output, *regexps,
                 encoding=locale.getpreferredencoding(), **popen_kwargs):
    """Run a subprocess and capture output lines matching regexps.

    Arguments:
    * cmd: The command to run, as a list or string, as for subprocess.Popen.
    * read_output: 'stdout' or 'stderr', the name of the output stream to read.
    Remaining arguments are regexps to match output, as strings or compiled
    regexp objects.  Output lines matching any regexp will be captured.

    Keyword arguments:
    * encoding: The encoding used to decode the subprocess output.
    Remaining keyword arguments are passed directly to subprocess.Popen.

    Returns 2-tuple (subprocess returncode, list of matched output lines).
    """
    regexps = [regexp if hasattr(regexp, 'search') else re.compile(regexp)
               for regexp in regexps]
    popen_kwargs[read_output] = subprocess.PIPE
    proc = subprocess.Popen(cmd, **popen_kwargs)
    with open(getattr(proc, read_output).fileno(), encoding=encoding) as output:
        matched_lines = []
        for line in output:
            if any(regexp.search(line) for regexp in regexps):
                matched_lines.append(line)
            if read_output == 'stderr':
                print(line, file=sys.stderr, end='')
    return proc.wait(), matched_lines


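# TimestampFile tracks when packages for a target were last uploaded.
# update() creates the file if necessary and backdates its mtime to the
# time this run started, so packages built while an upload was in progress
# are still picked up by the next run.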
class TimestampFile:
    def __init__(self, path):
        self.path = path
        self.start_time = time.time()

    def last_upload(self):
        try:
            return os.path.getmtime(self.path)
        except EnvironmentError:
            return -1

    def update(self):
        os.close(os.open(self.path, os.O_CREAT | os.O_APPEND))
        os.utime(self.path, (time.time(), self.start_time))


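# PackageSuite is the base class for a group of packages that get uploaded
# together.  update_packages() finds files matching self.globs that are
# newer than the last-upload timestamp, uploads each of them, then runs
# any post-upload step (such as rebuilding a repository index).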
class PackageSuite:
    NEED_SSH = False

    def __init__(self, glob_root, rel_globs):
        logger_part = getattr(self, 'LOGGER_PART', os.path.basename(glob_root))
        self.logger = logging.getLogger('arvados-dev.upload.' + logger_part)
        self.globs = [os.path.join(glob_root, rel_glob)
                      for rel_glob in rel_globs]

    def files_to_upload(self, since_timestamp):
        for abs_glob in self.globs:
            for path in glob.glob(abs_glob):
                if os.path.getmtime(path) >= since_timestamp:
                    yield path

    def upload_file(self, path):
        raise NotImplementedError("PackageSuite.upload_file")

    def upload_files(self, paths):
        for path in paths:
            self.logger.info("Uploading %s", path)
            self.upload_file(path)

    def post_uploads(self, paths):
        pass

    def update_packages(self, since_timestamp):
        upload_paths = list(self.files_to_upload(since_timestamp))
        if upload_paths:
            self.upload_files(upload_paths)
            self.post_uploads(upload_paths)


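# Uploads Python source distributions with `setup.py sdist upload`.
# "Already uploaded" responses from the package index are matched by
# REUPLOAD_REGEXPS and treated as success, so re-running the same build
# is effectively a no-op rather than an error.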
class PythonPackageSuite(PackageSuite):
    LOGGER_PART = 'python'
    REUPLOAD_REGEXPS = [
        re.compile(
            r'^error: Upload failed \(400\): A file named "[^"]+" already exists\b'),
        re.compile(
            r'^error: Upload failed \(400\): File already exists\b'),
        re.compile(
            r'^error: Upload failed \(400\): Only one sdist may be uploaded per release\b'),
    ]

    def __init__(self, glob_root, rel_globs):
        super().__init__(glob_root, rel_globs)
        self.seen_packages = set()

    def upload_file(self, path):
        src_dir = os.path.dirname(os.path.dirname(path))
        if src_dir in self.seen_packages:
            return
        self.seen_packages.add(src_dir)
        # NOTE: If we ever start uploading Python 3 packages, we'll need to
        # figure out some way to adapt cmd to match.  It might be easiest
        # to give all our setup.py files the executable bit, and run that
        # directly.
        # We also must run `sdist` before `upload`: `upload` uploads any
        # distributions previously generated in the command.  It doesn't
        # know how to upload distributions already on disk.  We write the
        # result to a dedicated directory to avoid interfering with our
        # timestamp tracking.
        cmd = ['python2.7', 'setup.py']
        if not self.logger.isEnabledFor(logging.INFO):
            cmd.append('--quiet')
        cmd.extend(['sdist', '--dist-dir', '.upload_dist', 'upload'])
        upload_returncode, repushed = run_and_grep(
            cmd, 'stderr', *self.REUPLOAD_REGEXPS, cwd=src_dir)
        if (upload_returncode != 0) and not repushed:
            raise subprocess.CalledProcessError(upload_returncode, cmd)
        shutil.rmtree(os.path.join(src_dir, '.upload_dist'))


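# Pushes Ruby gems with `gem push`.  As with the Python suite, a
# "repushing is not allowed" response is treated as success so reruns
# do not fail.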
class GemPackageSuite(PackageSuite):
    LOGGER_PART = 'gems'
    REUPLOAD_REGEXP = re.compile(r'^Repushing of gem versions is not allowed\.$')

    def upload_file(self, path):
        cmd = ['gem', 'push', path]
        push_returncode, repushed = run_and_grep(cmd, 'stdout', self.REUPLOAD_REGEXP)
        if (push_returncode != 0) and not repushed:
            raise subprocess.CalledProcessError(push_returncode, cmd)


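# Base class for suites that publish distribution packages: files are
# copied with scp into REMOTE_DEST_DIR/<target> on the repository server
# given by --ssh-host, then a short bash script is run there over ssh to
# finish publication.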
class DistroPackageSuite(PackageSuite):
    NEED_SSH = True
    REMOTE_DEST_DIR = 'tmp'

    def __init__(self, glob_root, rel_globs, target, ssh_host, ssh_opts):
        super().__init__(glob_root, rel_globs)
        self.target = target
        self.ssh_host = ssh_host
        self.ssh_opts = ['-o' + opt for opt in ssh_opts]
        if not self.logger.isEnabledFor(logging.INFO):
            self.ssh_opts.append('-q')

    def _build_cmd(self, base_cmd, *args):
        cmd = [base_cmd]
        cmd.extend(self.ssh_opts)
        cmd.extend(args)
        return cmd

    def _paths_basenames(self, paths):
        return (os.path.basename(path) for path in paths)

    def _run_script(self, script, *args):
        # SSH will use a shell to run our bash command, so we have to
        # quote our arguments.
        # self.__class__.__name__ provides $0 for the script, which makes a
        # nicer message if there's an error.
        subprocess.check_call(self._build_cmd(
                'ssh', self.ssh_host, 'bash', '-ec', pipes.quote(script),
                self.__class__.__name__, *(pipes.quote(s) for s in args)))

    def upload_files(self, paths):
        dest_dir = os.path.join(self.REMOTE_DEST_DIR, self.target)
        mkdir = self._build_cmd('ssh', self.ssh_host, 'install', '-d', dest_dir)
        subprocess.check_call(mkdir)
        cmd = self._build_cmd('scp', *paths)
        cmd.append('{}:{}'.format(self.ssh_host, dest_dir))
        subprocess.check_call(cmd)


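# Debian/Ubuntu packages are added to a freight-managed apt repository on
# the server.  TARGET_DISTNAMES maps each build target to its freight
# distribution name, including the newly added 'ubuntu1804' -> 'bionic-dev'
# entry for Ubuntu 18.04 (bionic).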
class DebianPackageSuite(DistroPackageSuite):
    FREIGHT_SCRIPT = """
cd "$1"; shift
DISTNAME=$1; shift
freight add "$@" "apt/$DISTNAME"
freight cache "apt/$DISTNAME"
rm "$@"
"""
    TARGET_DISTNAMES = {
        'debian8': 'jessie-dev',
        'debian9': 'stretch-dev',
        'ubuntu1404': 'trusty-dev',
        'ubuntu1604': 'xenial-dev',
        'ubuntu1804': 'bionic-dev',
        }

    def post_uploads(self, paths):
        self._run_script(self.FREIGHT_SCRIPT, self.REMOTE_DEST_DIR + '/' + self.target,
                         self.TARGET_DISTNAMES[self.target],
                         *self._paths_basenames(paths))


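# CentOS packages are signed with rpmsign, moved into the matching
# directory under REPO_ROOT, and the yum metadata is regenerated with
# createrepo.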
class RedHatPackageSuite(DistroPackageSuite):
    CREATEREPO_SCRIPT = """
cd "$1"; shift
REPODIR=$1; shift
rpmsign --addsign "$@" </dev/null
mv "$@" "$REPODIR"
createrepo "$REPODIR"
"""
    REPO_ROOT = '/var/www/rpm.arvados.org/'
    TARGET_REPODIRS = {
        'centos7': 'CentOS/7/dev/x86_64/',
        }

    def post_uploads(self, paths):
        repo_dir = os.path.join(self.REPO_ROOT,
                                self.TARGET_REPODIRS[self.target])
        self._run_script(self.CREATEREPO_SCRIPT, self.REMOTE_DEST_DIR + '/' + self.target,
                         repo_dir, *self._paths_basenames(paths))


def _define_suite(suite_class, *rel_globs, **kwargs):
    return functools.partial(suite_class, rel_globs=rel_globs, **kwargs)

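# PACKAGE_SUITES maps a target name to a partially-applied suite
# constructor; build_suite_and_upload() calls it with the workspace path
# (plus ssh details for the distro suites).  Illustrative sketch (the
# workspace path and host name are hypothetical):
#
#     suite = PACKAGE_SUITES['ubuntu1804'](
#         '/path/to/workspace', ssh_host='apt.example.com', ssh_opts=[])
#     suite.update_packages(since_timestamp=0)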
PACKAGE_SUITES = {
    'python': _define_suite(PythonPackageSuite,
                            'sdk/pam/dist/*.tar.gz',
                            'sdk/python/dist/*.tar.gz',
                            'sdk/cwl/dist/*.tar.gz',
                            'services/nodemanager/dist/*.tar.gz',
                            'services/fuse/dist/*.tar.gz',
                        ),
    'gems': _define_suite(GemPackageSuite,
                          'sdk/ruby/*.gem',
                          'sdk/cli/*.gem',
                          'services/login-sync/*.gem',
                      ),
    }
for target in ['debian8', 'debian9', 'ubuntu1404', 'ubuntu1604', 'ubuntu1804']:
    PACKAGE_SUITES[target] = _define_suite(
        DebianPackageSuite, os.path.join('packages', target, '*.deb'),
        target=target)
for target in ['centos7']:
    PACKAGE_SUITES[target] = _define_suite(
        RedHatPackageSuite, os.path.join('packages', target, '*.rpm'),
        target=target)

def parse_arguments(arguments):
    parser = argparse.ArgumentParser(
        prog="run_upload_packages.py",
        description="Upload Arvados packages to various repositories")
    parser.add_argument(
        '--workspace', '-W', default=os.environ.get('WORKSPACE'),
        help="Arvados source directory with built packages to upload")
    parser.add_argument(
        '--ssh-host', '-H',
        help="Host specification for distribution repository server")
    parser.add_argument('-o', action='append', default=[], dest='ssh_opts',
                        metavar='OPTION', help="Pass option to `ssh -o`")
    parser.add_argument('--verbose', '-v', action='count', default=0,
                        help="Log more information and subcommand output")
    parser.add_argument(
        'targets', nargs='*', default=['all'], metavar='target',
        help="Upload packages to these targets (default all)\nAvailable targets: " +
        ', '.join(sorted(PACKAGE_SUITES.keys())))
    args = parser.parse_args(arguments)
    if 'all' in args.targets:
        args.targets = list(PACKAGE_SUITES.keys())

    if args.workspace is None:
        parser.error("workspace not set from command line or environment")
    for target in args.targets:
        try:
            suite_class = PACKAGE_SUITES[target].func
        except KeyError:
            parser.error("unrecognized target {!r}".format(target))
        if suite_class.NEED_SSH and (args.ssh_host is None):
            parser.error(
                "--ssh-host must be specified to upload distribution packages")
    return args

def setup_logger(stream_dest, args):
    log_handler = logging.StreamHandler(stream_dest)
    log_handler.setFormatter(logging.Formatter(
            '%(asctime)s %(name)s[%(process)d] %(levelname)s: %(message)s',
            '%Y-%m-%d %H:%M:%S'))
    logger = logging.getLogger('arvados-dev.upload')
    logger.addHandler(log_handler)
    logger.setLevel(max(1, logging.WARNING - (10 * args.verbose)))

def build_suite_and_upload(target, since_timestamp, args):
    suite_def = PACKAGE_SUITES[target]
    kwargs = {}
    if suite_def.func.NEED_SSH:
        kwargs.update(ssh_host=args.ssh_host, ssh_opts=args.ssh_opts)
    suite = suite_def(args.workspace, **kwargs)
    suite.update_packages(since_timestamp)

def main(arguments, stdout=sys.stdout, stderr=sys.stderr):
    args = parse_arguments(arguments)
    setup_logger(stderr, args)
    for target in args.targets:
        ts_file = TimestampFile(os.path.join(args.workspace, 'packages',
                                             '.last_upload_%s' % target))
        last_upload_ts = ts_file.last_upload()
        build_suite_and_upload(target, last_upload_ts, args)
        ts_file.update()

if __name__ == '__main__':
    main(sys.argv[1:])
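
# Example invocation (the host name and paths are illustrative):
#
#     ./run_upload_packages.py --workspace /path/to/arvados \
#         --ssh-host user@apt.example.com -o Port=2222 ubuntu1804 gems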