Merge branch '9570-cwl-v1.0' closes #9570
[arvados.git] / sdk / cwl / arvados_cwl / runner.py
1 import os
2 import urlparse
3 from functools import partial
4 import logging
5 import json
6 import re
7
8 import cwltool.draft2tool
9 from cwltool.draft2tool import CommandLineTool
10 import cwltool.workflow
11 from cwltool.process import get_feature, scandeps, UnsupportedRequirement
12 from cwltool.load_tool import fetch_document
13 from cwltool.pathmapper import adjustFiles
14
15 import arvados.collection
16
17 from .arvdocker import arv_docker_get_image
18 from .pathmapper import ArvPathMapper
19
20 logger = logging.getLogger('arvados.cwl-runner')
21
22 cwltool.draft2tool.ACCEPTLIST_RE = re.compile(r"^[a-zA-Z0-9._+-]+$")
23
class Runner(object):
    """Base class for submitting a CWL runner job/process to Arvados.

    Handles the work common to all runner flavors: uploading the Docker
    images the workflow references, uploading the workflow document and
    job-order dependencies to Keep, and translating the finished record
    into a cwltool output callback.  Subclasses provide the actual
    submission and override update_pipeline_component().
    """

    def __init__(self, runner, tool, job_order, enable_reuse):
        self.arvrunner = runner          # dispatcher (provides .api, .project_uuid, callbacks)
        self.tool = tool                 # cwltool Process object for the workflow
        self.job_order = job_order       # CWL input object; rewritten in place later
        self.running = False
        self.enable_reuse = enable_reuse # whether job reuse is permitted
        self.uuid = None                 # filled in by the subclass on submission

    def update_pipeline_component(self, record):
        # No-op in the base class; subclasses sync their pipeline/container
        # record from the given API record.
        pass

    def upload_docker(self, tool):
        """Ensure every Docker image referenced by `tool` is available in Arvados.

        Recurses through workflow steps so images referenced by embedded
        tools are uploaded as well.
        """
        if isinstance(tool, CommandLineTool):
            (docker_req, docker_is_req) = get_feature(tool, "DockerRequirement")
            if docker_req:
                arv_docker_get_image(self.arvrunner.api, docker_req, True, self.arvrunner.project_uuid)
        elif isinstance(tool, cwltool.workflow.Workflow):
            for s in tool.steps:
                self.upload_docker(s.embedded_tool)


    def arvados_job_spec(self, *args, **kwargs):
        """Upload workflow and job-order dependencies to Keep.

        Scans the workflow document for its transitive dependencies,
        uploads them (and the job-order files) via ArvPathMapper, and
        rewrites self.job_order in place so its paths point into Keep.

        Returns the ArvPathMapper covering the workflow files.
        """
        self.upload_docker(self.tool)

        workflowfiles = set()
        jobfiles = set()
        workflowfiles.add(self.tool.tool["id"])

        self.name = os.path.basename(self.tool.tool["id"])

        def visitFiles(files, path):
            # Collect each referenced path; leave the path itself unchanged.
            files.add(path)
            return path

        document_loader, workflowobj, uri = fetch_document(self.tool.tool["id"])
        loaded = set()
        def loadref(b, u):
            # Fetch each referenced document at most once; returning an
            # empty dict for repeats keeps scandeps from looping on cycles.
            joined = urlparse.urljoin(b, u)
            if joined not in loaded:
                loaded.add(joined)
                return document_loader.fetch(joined)
            else:
                return {}

        sc = scandeps(uri, workflowobj,
                      set(("$import", "run")),
                      set(("$include", "$schemas", "path")),
                      loadref)
        adjustFiles(sc, partial(visitFiles, workflowfiles))
        adjustFiles(self.job_order, partial(visitFiles, jobfiles))

        keepprefix = kwargs.get("keepprefix", "")
        workflowmapper = ArvPathMapper(self.arvrunner, workflowfiles, "",
                                       keepprefix+"%s",
                                       keepprefix+"%s/%s",
                                       name=self.name,
                                       **kwargs)

        jobmapper = ArvPathMapper(self.arvrunner, jobfiles, "",
                                  keepprefix+"%s",
                                  keepprefix+"%s/%s",
                                  name=os.path.basename(self.job_order.get("id", "#")),
                                  **kwargs)

        # Rewrite job-order paths to their mapped Keep locations.
        adjustFiles(self.job_order, lambda p: jobmapper.mapper(p)[1])

        # The "id" key was only needed while scanning for dependencies.
        self.job_order.pop("id", None)

        return workflowmapper


    def done(self, record):
        """Translate the finished runner record into an output callback.

        Exit code 33 is mapped to "UnsupportedRequirement" (the runner's
        sentinel for UnsupportedRequirement errors); 0 means success, any
        other exit code or state means permanentFail.
        """
        if record["state"] == "Complete":
            if record.get("exit_code") is not None:
                if record["exit_code"] == 33:
                    processStatus = "UnsupportedRequirement"
                elif record["exit_code"] == 0:
                    processStatus = "success"
                else:
                    processStatus = "permanentFail"
            else:
                processStatus = "success"
        else:
            processStatus = "permanentFail"

        outputs = None
        try:
            try:
                outc = arvados.collection.Collection(record["output"])
                with outc.open("cwl.output.json") as f:
                    outputs = json.load(f)
                def keepify(path):
                    # Make bare output paths absolute keep: references.
                    if not path.startswith("keep:"):
                        return "keep:%s/%s" % (record["output"], path)
                    else:
                        return path
                adjustFiles(outputs, keepify)
            except Exception as e:
                # Best effort: log the failure but still deliver the
                # (possibly None) outputs so the dispatcher can finish.
                logger.error("While getting final output object: %s", e)
            self.arvrunner.output_callback(outputs, processStatus)
        finally:
            # Always deregister this process from the dispatcher.
            del self.arvrunner.processes[record["uuid"]]