X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/6c43be47cb3756a0e6ffc924572259d1a1c8f2c3..6884eedcf015af4b79857d8a47ef93f844e6d190:/sdk/cwl/arvados_cwl/runner.py

diff --git a/sdk/cwl/arvados_cwl/runner.py b/sdk/cwl/arvados_cwl/runner.py
index 0cc23ab459..19cb7eae37 100644
--- a/sdk/cwl/arvados_cwl/runner.py
+++ b/sdk/cwl/arvados_cwl/runner.py
@@ -1,15 +1,26 @@
 import os
 import urlparse
 from functools import partial
+import logging
+import json
+import re
 
+import cwltool.draft2tool
 from cwltool.draft2tool import CommandLineTool
 import cwltool.workflow
-from cwltool.process import get_feature, scandeps, adjustFiles
+from cwltool.process import get_feature, scandeps, UnsupportedRequirement
 from cwltool.load_tool import fetch_document
+from cwltool.pathmapper import adjustFileObjs
+
+import arvados.collection
 
 from .arvdocker import arv_docker_get_image
 from .pathmapper import ArvPathMapper
 
+logger = logging.getLogger('arvados.cwl-runner')
+
+cwltool.draft2tool.ACCEPTLIST_RE = re.compile(r"^[a-zA-Z0-9._+-]+$")
+
 class Runner(object):
     def __init__(self, runner, tool, job_order, enable_reuse):
         self.arvrunner = runner
@@ -17,6 +28,7 @@ class Runner(object):
         self.job_order = job_order
         self.running = False
         self.enable_reuse = enable_reuse
+        self.uuid = None
 
     def update_pipeline_component(self, record):
         pass
@@ -34,40 +46,48 @@ class Runner(object):
     def arvados_job_spec(self, *args, **kwargs):
         self.upload_docker(self.tool)
 
-        workflowfiles = set()
-        jobfiles = set()
-        workflowfiles.add(self.tool.tool["id"])
+        workflowfiles = []
+        jobfiles = []
+        workflowfiles.append({"class":"File", "location": self.tool.tool["id"]})
 
         self.name = os.path.basename(self.tool.tool["id"])
 
         def visitFiles(files, path):
-            files.add(path)
-            return path
+            files.append(path)
 
         document_loader, workflowobj, uri = fetch_document(self.tool.tool["id"])
+        loaded = set()
         def loadref(b, u):
-            return document_loader.fetch(urlparse.urljoin(b, u))
+            joined = urlparse.urljoin(b, u)
+            if joined not in loaded:
+                loaded.add(joined)
+                return document_loader.fetch(urlparse.urljoin(b, u))
+            else:
+                return {}
 
         sc = scandeps(uri, workflowobj,
                       set(("$import", "run")),
-                      set(("$include", "$schemas", "path")),
+                      set(("$include", "$schemas", "path", "location")),
                       loadref)
-        adjustFiles(sc, partial(visitFiles, workflowfiles))
-        adjustFiles(self.job_order, partial(visitFiles, jobfiles))
+        adjustFileObjs(sc, partial(visitFiles, workflowfiles))
+        adjustFileObjs(self.job_order, partial(visitFiles, jobfiles))
 
+        keepprefix = kwargs.get("keepprefix", "")
         workflowmapper = ArvPathMapper(self.arvrunner, workflowfiles, "",
-                                       "%s",
-                                       "%s/%s",
+                                       keepprefix+"%s",
+                                       keepprefix+"%s/%s",
                                        name=self.name,
                                        **kwargs)
 
         jobmapper = ArvPathMapper(self.arvrunner, jobfiles, "",
-                                  "%s",
-                                  "%s/%s",
+                                  keepprefix+"%s",
+                                  keepprefix+"%s/%s",
                                   name=os.path.basename(self.job_order.get("id", "#")),
                                   **kwargs)
 
-        adjustFiles(self.job_order, lambda p: jobmapper.mapper(p)[1])
+        def setloc(p):
+            p["location"] = jobmapper.mapper(p["location"])[1]
+        adjustFileObjs(self.job_order, setloc)
 
         if "id" in self.job_order:
             del self.job_order["id"]
@@ -77,7 +97,15 @@ class Runner(object):
 
     def done(self, record):
         if record["state"] == "Complete":
-            processStatus = "success"
+            if record.get("exit_code") is not None:
+                if record["exit_code"] == 33:
+                    processStatus = "UnsupportedRequirement"
+                elif record["exit_code"] == 0:
+                    processStatus = "success"
+                else:
+                    processStatus = "permanentFail"
+            else:
+                processStatus = "success"
         else:
             processStatus = "permanentFail"
 
@@ -90,9 +118,11 @@ class Runner(object):
                 def keepify(path):
                     if not path.startswith("keep:"):
                         return "keep:%s/%s" % (record["output"], path)
+                    else:
+                        return path
                 adjustFiles(outputs, keepify)
             except Exception as e:
                 logger.error("While getting final output object: %s", e)
             self.arvrunner.output_callback(outputs, processStatus)
         finally:
-            del self.arvrunner.jobs[record["uuid"]]
+            del self.arvrunner.processes[record["uuid"]]