sdk/cwl/arvados_cwl/arvcontainer.py
import logging
import json
import os

from cwltool.errors import WorkflowException
from cwltool.process import get_feature, UnsupportedRequirement, shortname
from cwltool.pathmapper import adjustFiles

import arvados.collection

from .arvdocker import arv_docker_get_image
from . import done
from .runner import Runner

logger = logging.getLogger('arvados.cwl-runner')

class ArvadosContainer(object):
    """Submit and manage a Crunch container request for executing a CWL CommandLineTool."""

    def __init__(self, runner):
        self.arvrunner = runner
        self.running = False
        self.uuid = None

    def update_pipeline_component(self, r):
        pass

    def run(self, dry_run=False, pull_image=True, **kwargs):
        container_request = {
            "command": self.command_line,
            "owner_uuid": self.arvrunner.project_uuid,
            "name": self.name,
            "output_path": self.outdir,
            "cwd": self.outdir,
            "priority": 1,
            "state": "Committed"
        }
        runtime_constraints = {}
        mounts = {
            self.outdir: {
                "kind": "tmp"
            }
        }

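        # Mount each input collection referenced by the pathmapper.  The mapped
        # targets are assumed to be "/keep/<portable data hash>/..." paths, so
        # p[6:] is the part starting at the portable data hash; for a
        # hypothetical input this yields e.g.
        #   "/keep/99999999999999999999999999999999+99/input.txt"
        #     -> {"kind": "collection",
        #         "portable_data_hash": "99999999999999999999999999999999+99/input.txt"}
        # Directories that map to a collection root are mounted first; the second
        # pass mounts the remaining files unless their collection is already
        # covered by one of those directory mounts.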
        dirs = set()
        for f in self.pathmapper.files():
            _, p, tp = self.pathmapper.mapper(f)
            if tp == "Directory" and '/' not in p[6:]:
                mounts[p] = {
                    "kind": "collection",
                    "portable_data_hash": p[6:]
                }
                dirs.add(p[6:])
        for f in self.pathmapper.files():
            _, p, tp = self.pathmapper.mapper(f)
            if p[6:].split("/")[0] not in dirs:
                mounts[p] = {
                    "kind": "collection",
                    "portable_data_hash": p[6:]
                }

        if self.generatefiles["listing"]:
            raise UnsupportedRequirement("Generate files not supported")

        container_request["environment"] = {"TMPDIR": self.tmpdir, "HOME": self.outdir}
        if self.environment:
            container_request["environment"].update(self.environment)

        if self.stdin:
            raise UnsupportedRequirement("Stdin redirection currently not supported")

        if self.stderr:
            raise UnsupportedRequirement("Stderr redirection currently not supported")

        if self.stdout:
            mounts["stdout"] = {"kind": "file",
                                "path": "%s/%s" % (self.outdir, self.stdout)}

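        # Resolve the Docker image for the container, falling back to the default
        # arvados/jobs image when the tool has no DockerRequirement.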
        (docker_req, docker_is_req) = get_feature(self, "DockerRequirement")
        if not docker_req:
            docker_req = {"dockerImageId": "arvados/jobs"}

        container_request["container_image"] = arv_docker_get_image(self.arvrunner.api,
                                                                    docker_req,
                                                                    pull_image,
                                                                    self.arvrunner.project_uuid)

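        # Translate the CWL ResourceRequirement into Crunch runtime constraints:
        # "cores" becomes vcpus and "ram", which CWL expresses in mebibytes, is
        # converted to bytes (e.g. 1024 MiB -> 1024 * 2**20 = 1073741824 bytes).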
        resources = self.builder.resources
        if resources is not None:
            runtime_constraints["vcpus"] = resources.get("cores", 1)
            runtime_constraints["ram"] = resources.get("ram") * 2**20

        api_req, _ = get_feature(self, "http://arvados.org/cwl#APIRequirement")
        if api_req:
            runtime_constraints["API"] = True

        runtime_req, _ = get_feature(self, "http://arvados.org/cwl#RuntimeConstraints")
        if runtime_req:
            logger.warning("RuntimeConstraints not yet supported by container API")

        container_request["mounts"] = mounts
        container_request["runtime_constraints"] = runtime_constraints

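        # Submit the container request.  The new container is registered in
        # arvrunner.processes (presumably so state updates can be routed back to
        # this object), and a request that comes back already "Final" is handled
        # immediately.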
        try:
            response = self.arvrunner.api.container_requests().create(
                body=container_request
            ).execute(num_retries=self.arvrunner.num_retries)

            self.arvrunner.processes[response["container_uuid"]] = self

            logger.info("Container %s (%s) request state is %s", self.name, response["uuid"], response["state"])

            if response["state"] == "Final":
                self.done(response)
        except Exception as e:
            logger.error("Got error %s" % str(e))
            self.output_callback({}, "permanentFail")

    def done(self, record):
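        # Map the finished container's state and exit code to a CWL process
        # status using the tool's successCodes / temporaryFailCodes /
        # permanentFailCodes hints, then collect outputs from the container's
        # output collection via done.done().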
        try:
            if record["state"] == "Complete":
                rcode = record["exit_code"]
                if self.successCodes and rcode in self.successCodes:
                    processStatus = "success"
                elif self.temporaryFailCodes and rcode in self.temporaryFailCodes:
                    processStatus = "temporaryFail"
                elif self.permanentFailCodes and rcode in self.permanentFailCodes:
                    processStatus = "permanentFail"
                elif rcode == 0:
                    processStatus = "success"
                else:
                    processStatus = "permanentFail"
            else:
                processStatus = "permanentFail"

            try:
                outputs = {}
                if record["output"]:
                    outputs = done.done(self, record, "/tmp", self.outdir, "/keep")
            except WorkflowException as e:
                logger.error("Error while collecting container outputs:\n%s", e, exc_info=(e if self.arvrunner.debug else False))
                processStatus = "permanentFail"
            except Exception as e:
                logger.exception("Got unknown exception while collecting job outputs:")
                processStatus = "permanentFail"

            self.output_callback(outputs, processStatus)
        finally:
            del self.arvrunner.processes[record["uuid"]]


class RunnerContainer(Runner):
    """Submit and manage a container that runs arvados-cwl-runner."""

    def arvados_job_spec(self, dry_run=False, pull_image=True, **kwargs):
        """Create an Arvados container request for this workflow.

        The returned dict can be used to create a container passed as
        the +body+ argument to container_requests().create().
        """

        workflowmapper = super(RunnerContainer, self).arvados_job_spec(dry_run=dry_run, pull_image=pull_image, **kwargs)

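        # Store the job order (the workflow's input object) as cwl.input.json in
        # a new collection so it can be mounted into the runner container below.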
        with arvados.collection.Collection(api_client=self.arvrunner.api,
                                           keep_client=self.arvrunner.keep_client,
                                           num_retries=self.arvrunner.num_retries) as jobobj:
            with jobobj.open("cwl.input.json", "w") as f:
                json.dump(self.job_order, f, sort_keys=True, indent=4)
            jobobj.save_new(owner_uuid=self.arvrunner.project_uuid)

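        # The mapped location of the workflow file is expected to be a
        # "keep:<pdh>/<filename>" reference (run() below sets keepprefix to
        # "keep:"), so the slice below extracts just the collection's portable
        # data hash, e.g. "keep:99999999999999999999999999999999+99/wf.cwl"
        # -> "99999999999999999999999999999999+99" (hypothetical hash).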
        workflowname = os.path.basename(self.tool.tool["id"])
        workflowpath = "/var/lib/cwl/workflow/%s" % workflowname
        workflowcollection = workflowmapper.mapper(self.tool.tool["id"])[1]
        workflowcollection = workflowcollection[5:workflowcollection.index('/')]
        jobpath = "/var/lib/cwl/job/cwl.input.json"

        container_image = arv_docker_get_image(self.arvrunner.api,
                                               {"dockerImageId": "arvados/jobs"},
                                               pull_image,
                                               self.arvrunner.project_uuid)

        return {
            "command": ["arvados-cwl-runner", "--local", "--api=containers", workflowpath, jobpath],
            "owner_uuid": self.arvrunner.project_uuid,
            "name": self.name,
            "output_path": "/var/spool/cwl",
            "cwd": "/var/spool/cwl",
            "priority": 1,
            "state": "Committed",
            "container_image": container_image,
            "mounts": {
                "/var/lib/cwl/workflow": {
                    "kind": "collection",
                    "portable_data_hash": "%s" % workflowcollection
                },
                jobpath: {
                    "kind": "collection",
                    "portable_data_hash": "%s/cwl.input.json" % jobobj.portable_data_hash()
                },
                "stdout": {
                    "kind": "file",
                    "path": "/var/spool/cwl/cwl.output.json"
                },
                "/var/spool/cwl": {
                    "kind": "collection",
                    "writable": True
                }
            },
            "runtime_constraints": {
                "vcpus": 1,
                "ram": 1024*1024*256,
                "API": True
            }
        }

    def run(self, *args, **kwargs):
        kwargs["keepprefix"] = "keep:"
        job_spec = self.arvados_job_spec(*args, **kwargs)
        job_spec.setdefault("owner_uuid", self.arvrunner.project_uuid)

        response = self.arvrunner.api.container_requests().create(
            body=job_spec
        ).execute(num_retries=self.arvrunner.num_retries)

        self.uuid = response["uuid"]
        self.arvrunner.processes[response["container_uuid"]] = self

        logger.info("Submitted container %s", response["uuid"])

        if response["state"] in ("Complete", "Failed", "Cancelled"):
            self.done(response)