Make dev/testing selectable instead of having a second copy-pasted script
arvados-dev.git / jenkins/run_upload_packages.py
#!/usr/bin/env python3

# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: AGPL-3.0

import argparse
import functools
import glob
import locale
import logging
import os
import pipes
import re
import shutil
import subprocess
import sys
import time

def run_and_grep(cmd, read_output, *regexps,
                 encoding=locale.getpreferredencoding(), **popen_kwargs):
    """Run a subprocess and capture output lines matching regexps.

    Arguments:
    * cmd: The command to run, as a list or string, as for subprocess.Popen.
    * read_output: 'stdout' or 'stderr', the name of the output stream to read.
    Remaining arguments are regexps to match output, as strings or compiled
    regexp objects.  Output lines matching any regexp will be captured.

    Keyword arguments:
    * encoding: The encoding used to decode the subprocess output.
    Remaining keyword arguments are passed directly to subprocess.Popen.

    Returns 2-tuple (subprocess returncode, list of matched output lines).
    """
    regexps = [regexp if hasattr(regexp, 'search') else re.compile(regexp)
               for regexp in regexps]
    popen_kwargs[read_output] = subprocess.PIPE
    proc = subprocess.Popen(cmd, **popen_kwargs)
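    # Wrap the pipe's file descriptor in a text-mode file object so the
    # subprocess output is decoded with the requested encoding.  When reading
    # stderr, each line is also echoed to our own stderr so the subcommand's
    # diagnostics stay visible in the console/build log.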
    with open(getattr(proc, read_output).fileno(), encoding=encoding) as output:
        matched_lines = []
        for line in output:
            if any(regexp.search(line) for regexp in regexps):
                matched_lines.append(line)
            if read_output == 'stderr':
                print(line, file=sys.stderr, end='')
    return proc.wait(), matched_lines


class TimestampFile:
    def __init__(self, path):
        self.path = path
        self.start_time = time.time()

    def last_upload(self):
        try:
            return os.path.getmtime(self.path)
        except EnvironmentError:
            return -1

    def update(self):
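        # Touch the marker file, then set its mtime to the time this run
        # started (not the time it finished), so packages built while the run
        # was already in progress are still picked up next time.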
        os.close(os.open(self.path, os.O_CREAT | os.O_APPEND))
        os.utime(self.path, (time.time(), self.start_time))


class PackageSuite:
    NEED_SSH = False

    def __init__(self, glob_root, rel_globs):
        logger_part = getattr(self, 'LOGGER_PART', os.path.basename(glob_root))
        self.logger = logging.getLogger('arvados-dev.upload.' + logger_part)
        self.globs = [os.path.join(glob_root, rel_glob)
                      for rel_glob in rel_globs]

    def files_to_upload(self, since_timestamp):
        for abs_glob in self.globs:
            for path in glob.glob(abs_glob):
                if os.path.getmtime(path) >= since_timestamp:
                    yield path

    def upload_file(self, path):
        raise NotImplementedError("PackageSuite.upload_file")

    def upload_files(self, paths):
        for path in paths:
            self.logger.info("Uploading %s", path)
            self.upload_file(path)

    def post_uploads(self, paths):
        pass

    def update_packages(self, since_timestamp):
        upload_paths = list(self.files_to_upload(since_timestamp))
        if upload_paths:
            self.upload_files(upload_paths)
            self.post_uploads(upload_paths)


class PythonPackageSuite(PackageSuite):
    LOGGER_PART = 'python'
    REUPLOAD_REGEXPS = [
        re.compile(
            r'^error: Upload failed \(400\): A file named "[^"]+" already exists\b'),
        re.compile(
            r'^error: Upload failed \(400\): File already exists\b'),
        re.compile(
            r'^error: Upload failed \(400\): Only one sdist may be uploaded per release\b'),
    ]

    def __init__(self, glob_root, rel_globs):
        super().__init__(glob_root, rel_globs)
        self.seen_packages = set()

    def upload_file(self, path):
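        # path looks like .../dist/<package>.tar.gz; two dirname() calls give
        # the source directory that holds setup.py.  Each source directory is
        # uploaded at most once per run, no matter how many sdists match.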
        src_dir = os.path.dirname(os.path.dirname(path))
        if src_dir in self.seen_packages:
            return
        self.seen_packages.add(src_dir)
        # NOTE: If we ever start uploading Python 3 packages, we'll need to
        # figure out some way to adapt cmd to match.  It might be easiest
        # to give all our setup.py files the executable bit, and run that
        # directly.
        # We also must run `sdist` before `upload`: `upload` uploads any
        # distributions previously generated in the command.  It doesn't
        # know how to upload distributions already on disk.  We write the
        # result to a dedicated directory to avoid interfering with our
        # timestamp tracking.
        cmd = ['python2.7', 'setup.py']
        if not self.logger.isEnabledFor(logging.INFO):
            cmd.append('--quiet')
        cmd.extend(['sdist', '--dist-dir', '.upload_dist', 'upload'])
        upload_returncode, repushed = run_and_grep(
            cmd, 'stderr', *self.REUPLOAD_REGEXPS, cwd=src_dir)
        if (upload_returncode != 0) and not repushed:
            raise subprocess.CalledProcessError(upload_returncode, cmd)
        shutil.rmtree(os.path.join(src_dir, '.upload_dist'))


class GemPackageSuite(PackageSuite):
    LOGGER_PART = 'gems'
    REUPLOAD_REGEXP = re.compile(r'^Repushing of gem versions is not allowed\.$')

    def upload_file(self, path):
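        # `gem push` exits nonzero when the version has already been pushed;
        # tolerate that specific failure so re-running a partially completed
        # upload job is safe.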
        cmd = ['gem', 'push', path]
        push_returncode, repushed = run_and_grep(cmd, 'stdout', self.REUPLOAD_REGEXP)
        if (push_returncode != 0) and not repushed:
            raise subprocess.CalledProcessError(push_returncode, cmd)


class DistroPackageSuite(PackageSuite):
    NEED_SSH = True
    REMOTE_DEST_DIR = 'tmp'

    def __init__(self, glob_root, rel_globs, target, ssh_host, ssh_opts):
        super().__init__(glob_root, rel_globs)
        self.target = target
        self.ssh_host = ssh_host
        self.ssh_opts = ['-o' + opt for opt in ssh_opts]
        if not self.logger.isEnabledFor(logging.INFO):
            self.ssh_opts.append('-q')

    def _build_cmd(self, base_cmd, *args):
        cmd = [base_cmd]
        cmd.extend(self.ssh_opts)
        cmd.extend(args)
        return cmd

    def _paths_basenames(self, paths):
        return (os.path.basename(path) for path in paths)

    def _run_script(self, script, *args):
        # SSH will use a shell to run our bash command, so we have to
        # quote our arguments.
        # self.__class__.__name__ provides $0 for the script, which makes a
        # nicer message if there's an error.
        subprocess.check_call(self._build_cmd(
                'ssh', self.ssh_host, 'bash', '-ec', pipes.quote(script),
                self.__class__.__name__, *(pipes.quote(s) for s in args)))

    def upload_files(self, paths):
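        # Stage the packages in a per-target directory under REMOTE_DEST_DIR
        # on the repository server; the subclasses' post_uploads() then file
        # them into the actual apt/yum repository from there.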
        dest_dir = os.path.join(self.REMOTE_DEST_DIR, self.target)
        mkdir = self._build_cmd('ssh', self.ssh_host, 'install', '-d', dest_dir)
        subprocess.check_call(mkdir)
        cmd = self._build_cmd('scp', *paths)
        cmd.append('{}:{}'.format(self.ssh_host, dest_dir))
        subprocess.check_call(cmd)


class DebianPackageSuite(DistroPackageSuite):
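    # Remote script run via _run_script(): $1 is the staging directory on the
    # server, $2 the apt distribution name; the remaining arguments are the
    # package basenames, which freight adds to apt/$DISTNAME and then deletes.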
    FREIGHT_SCRIPT = """
cd "$1"; shift
DISTNAME=$1; shift
freight add "$@" "apt/$DISTNAME"
freight cache "apt/$DISTNAME"
rm "$@"
"""

    def __init__(self, glob_root, rel_globs, target, ssh_host, ssh_opts, repo):
        super().__init__(glob_root, rel_globs, target, ssh_host, ssh_opts)
        self.TARGET_DISTNAMES = {
            'debian8': 'jessie-'+repo,
            'debian9': 'stretch-'+repo,
            'debian10': 'buster-'+repo,
            'ubuntu1404': 'trusty-'+repo,
            'ubuntu1604': 'xenial-'+repo,
            'ubuntu1804': 'bionic-'+repo,
            }

    def post_uploads(self, paths):
        self._run_script(self.FREIGHT_SCRIPT, self.REMOTE_DEST_DIR + '/' + self.target,
                         self.TARGET_DISTNAMES[self.target],
                         *self._paths_basenames(paths))


class RedHatPackageSuite(DistroPackageSuite):
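    # Remote script run via _run_script(): $1 is the staging directory, $2 the
    # repository directory under REPO_ROOT; the remaining arguments are the
    # rpm basenames, which are signed, moved into the repo, and reindexed.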
    CREATEREPO_SCRIPT = """
cd "$1"; shift
REPODIR=$1; shift
rpmsign --addsign "$@" </dev/null
mv "$@" "$REPODIR"
createrepo "$REPODIR"
"""
    REPO_ROOT = '/var/www/rpm.arvados.org/'

    def __init__(self, glob_root, rel_globs, target, ssh_host, ssh_opts, repo):
        super().__init__(glob_root, rel_globs, target, ssh_host, ssh_opts)
        self.TARGET_REPODIRS = {
            'centos7': 'CentOS/7/%s/x86_64/' % repo,
        }

    def post_uploads(self, paths):
        repo_dir = os.path.join(self.REPO_ROOT,
                                self.TARGET_REPODIRS[self.target])
        self._run_script(self.CREATEREPO_SCRIPT, self.REMOTE_DEST_DIR + '/' + self.target,
                         repo_dir, *self._paths_basenames(paths))

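# Suites whose artifacts can be uploaded straight from the workspace are
# defined statically here; the distro (deb/rpm) suites are added in main()
# once --repo is known, since the repository name is baked into each target.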
def _define_suite(suite_class, *rel_globs, **kwargs):
    return functools.partial(suite_class, rel_globs=rel_globs, **kwargs)

PACKAGE_SUITES = {
    'python': _define_suite(PythonPackageSuite,
                            'sdk/pam/dist/*.tar.gz',
                            'sdk/python/dist/*.tar.gz',
                            'sdk/cwl/dist/*.tar.gz',
                            'services/nodemanager/dist/*.tar.gz',
                            'services/fuse/dist/*.tar.gz',
                            ),
    'gems': _define_suite(GemPackageSuite,
                          'sdk/ruby/*.gem',
                          'sdk/cli/*.gem',
                          'services/login-sync/*.gem',
                          ),
    }

def parse_arguments(arguments):
    parser = argparse.ArgumentParser(
        description="Upload Arvados packages to various repositories")
    parser.add_argument(
        '--workspace', '-W', default=os.environ.get('WORKSPACE'),
        help="Arvados source directory with built packages to upload")
    parser.add_argument(
        '--ssh-host', '-H',
        help="Host specification for distribution repository server")
    parser.add_argument('-o', action='append', default=[], dest='ssh_opts',
                        metavar='OPTION', help="Pass option to `ssh -o`")
    parser.add_argument('--verbose', '-v', action='count', default=0,
                        help="Log more information and subcommand output")
    parser.add_argument(
        '--repo', choices=['dev', 'testing'],
        help="Whether to upload to dev (nightly) or testing (release candidate) repository")
    parser.add_argument(
        'targets', nargs='*', default=['all'], metavar='target',
        help="Upload packages to these targets (default all)\nAvailable targets: " +
        ', '.join(sorted(PACKAGE_SUITES.keys())))
    args = parser.parse_args(arguments)
    if 'all' in args.targets:
        args.targets = list(PACKAGE_SUITES.keys())

    if args.workspace is None:
        parser.error("workspace not set from command line or environment")
    for target in args.targets:
        try:
            suite_class = PACKAGE_SUITES[target].func
        except KeyError:
            parser.error("unrecognized target {!r}".format(target))
        if suite_class.NEED_SSH and (args.ssh_host is None):
            parser.error(
                "--ssh-host must be specified to upload distribution packages")
    return args

def setup_logger(stream_dest, args):
    log_handler = logging.StreamHandler(stream_dest)
    log_handler.setFormatter(logging.Formatter(
            '%(asctime)s %(name)s[%(process)d] %(levelname)s: %(message)s',
            '%Y-%m-%d %H:%M:%S'))
    logger = logging.getLogger('arvados-dev.upload')
    logger.addHandler(log_handler)
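    # Each -v lowers the threshold by one level: WARNING by default, INFO with
    # -v, DEBUG with -vv (clamped at 1 so logging is never fully disabled).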
    logger.setLevel(max(1, logging.WARNING - (10 * args.verbose)))

def build_suite_and_upload(target, since_timestamp, args):
    suite_def = PACKAGE_SUITES[target]
    kwargs = {}
    if suite_def.func.NEED_SSH:
        kwargs.update(ssh_host=args.ssh_host, ssh_opts=args.ssh_opts)
    suite = suite_def(args.workspace, **kwargs)
    suite.update_packages(since_timestamp)

def main(arguments, stdout=sys.stdout, stderr=sys.stderr):
    args = parse_arguments(arguments)
    setup_logger(stderr, args)

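    # The deb/rpm suites can only be defined now, after argument parsing,
    # because the repository flavor from --repo is baked into each target's
    # distribution name and destination directory.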
    for target in ['debian8', 'debian9', 'debian10', 'ubuntu1404', 'ubuntu1604', 'ubuntu1804']:
        PACKAGE_SUITES[target] = _define_suite(
            DebianPackageSuite, os.path.join('packages', target, '*.deb'),
            target=target, repo=args.repo)
    for target in ['centos7']:
        PACKAGE_SUITES[target] = _define_suite(
            RedHatPackageSuite, os.path.join('packages', target, '*.rpm'),
            target=target, repo=args.repo)

    for target in args.targets:
        ts_file = TimestampFile(os.path.join(args.workspace, 'packages',
                                             '.last_upload_%s' % target))
        last_upload_ts = ts_file.last_upload()
        build_suite_and_upload(target, last_upload_ts, args)
        ts_file.update()

if __name__ == '__main__':
    main(sys.argv[1:])