X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/8af4c785ee48083eac92a0bd1ccd0bf4c41a9410..eae48c31bb338689ec67fbc6a14a2e0b1fb5e3b6:/sdk/cwl/arvados_cwl/arvjob.py

diff --git a/sdk/cwl/arvados_cwl/arvjob.py b/sdk/cwl/arvados_cwl/arvjob.py
index 6fb0fb96cf..8a62204f8f 100644
--- a/sdk/cwl/arvados_cwl/arvjob.py
+++ b/sdk/cwl/arvados_cwl/arvjob.py
@@ -1,25 +1,28 @@
 import logging
 import re
 import copy
+import json
+import time
 
 from cwltool.process import get_feature, shortname
-from cwltool.errors import WorkflowException
+from cwltool.errors import WorkflowException, UnsupportedRequirement
 from cwltool.draft2tool import revmap_file, CommandLineTool
 from cwltool.load_tool import fetch_document
 from cwltool.builder import Builder
-from cwltool.pathmapper import PathMapper
 
 import arvados.collection
 
 from .arvdocker import arv_docker_get_image
-from .runner import Runner
+from .runner import Runner, arvados_jobs_image
+from .pathmapper import InitialWorkDirPathMapper
+from .perf import Perf
 from . import done
+from ._version import __version__
 
 logger = logging.getLogger('arvados.cwl-runner')
+metrics = logging.getLogger('arvados.cwl-runner.metrics')
 
-tmpdirre = re.compile(r"^\S+ \S+ \d+ \d+ stderr \S+ \S+ crunchrunner: \$\(task\.tmpdir\)=(.*)")
-outdirre = re.compile(r"^\S+ \S+ \d+ \d+ stderr \S+ \S+ crunchrunner: \$\(task\.outdir\)=(.*)")
-keepre = re.compile(r"^\S+ \S+ \d+ \d+ stderr \S+ \S+ crunchrunner: \$\(task\.keep\)=(.*)")
+crunchrunner_re = re.compile(r"^\S+ \S+ \d+ \d+ stderr \S+ \S+ crunchrunner: \$\(task\.(tmpdir|outdir|keep)\)=(.*)")
 
 class ArvadosJob(object):
     """Submit and manage a Crunch job for executing a CWL CommandLineTool."""
@@ -35,23 +38,31 @@ class ArvadosJob(object):
             }
             runtime_constraints = {}
 
-        if self.generatefiles["listing"]:
-            vwd = arvados.collection.Collection()
-            script_parameters["task.vwd"] = {}
-            generatemapper = PathMapper([self.generatefiles], self.outdir,
-                                        ".", separateDirs=False)
-            for f, p in generatemapper.items():
-                if p.type == "CreateFile":
-                    with vwd.open(p.target, "w") as n:
-                        n.write(p.resolved.encode("utf-8"))
-            vwd.save_new()
-            for f, p in generatemapper.items():
-                if p.type == "File":
-                    script_parameters["task.vwd"][p.target] = self.pathmapper.mapper(f).target
-                if p.type == "CreateFile":
-                    script_parameters["task.vwd"][p.target] = "$(task.keep)/%s/%s" % (vwd.portable_data_hash(), p.target)
-
-        script_parameters["task.env"] = {"TMPDIR": "$(task.tmpdir)"}
+        with Perf(metrics, "generatefiles %s" % self.name):
+            if self.generatefiles["listing"]:
+                vwd = arvados.collection.Collection(api_client=self.arvrunner.api,
+                                                    keep_client=self.arvrunner.keep_client,
+                                                    num_retries=self.arvrunner.num_retries)
+                script_parameters["task.vwd"] = {}
+                generatemapper = InitialWorkDirPathMapper([self.generatefiles], "", "",
+                                                          separateDirs=False)
+
+                with Perf(metrics, "createfiles %s" % self.name):
+                    for f, p in generatemapper.items():
+                        if p.type == "CreateFile":
+                            with vwd.open(p.target, "w") as n:
+                                n.write(p.resolved.encode("utf-8"))
+
+                with Perf(metrics, "generatefiles.save_new %s" % self.name):
+                    vwd.save_new()
+
+                for f, p in generatemapper.items():
+                    if p.type == "File":
+                        script_parameters["task.vwd"][p.target] = p.resolved
+                    if p.type == "CreateFile":
+                        script_parameters["task.vwd"][p.target] = "$(task.keep)/%s/%s" % (vwd.portable_data_hash(), p.target)
+
+        script_parameters["task.env"] = {"TMPDIR": self.tmpdir, "HOME": self.outdir}
         if self.environment:
             script_parameters["task.env"].update(self.environment)
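
The Perf blocks introduced above (and used throughout the rest of this diff) time each
potentially slow step and report to the separate arvados.cwl-runner.metrics logger. The
.perf module itself is not shown in this diff; the following is only a minimal sketch of
a compatible context manager, assuming it does nothing beyond logging enter/exit times:

    import time

    class Perf(object):
        """Log wall-clock timing of a named step to a metrics logger (sketch)."""
        def __init__(self, logger, name):
            self.logger = logger
            self.name = name

        def __enter__(self):
            self.start = time.time()
            self.logger.debug("ENTER %s %s", self.name, self.start)

        def __exit__(self, exc_type, exc_value, tb):
            now = time.time()
            # Elapsed time for the block, in seconds.
            self.logger.debug("EXIT %s %s %s", self.name, now, now - self.start)
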
script_parameters["task.permanentFailCodes"] = self.permanentFailCodes - (docker_req, docker_is_req) = get_feature(self, "DockerRequirement") - if docker_req and kwargs.get("use_container") is not False: - runtime_constraints["docker_image"] = arv_docker_get_image(self.arvrunner.api, docker_req, pull_image, self.arvrunner.project_uuid) - else: - runtime_constraints["docker_image"] = "arvados/jobs" + with Perf(metrics, "arv_docker_get_image %s" % self.name): + (docker_req, docker_is_req) = get_feature(self, "DockerRequirement") + if docker_req and kwargs.get("use_container") is not False: + if docker_req.get("dockerOutputDirectory"): + raise UnsupportedRequirement("Option 'dockerOutputDirectory' of DockerRequirement not supported.") + runtime_constraints["docker_image"] = arv_docker_get_image(self.arvrunner.api, docker_req, pull_image, self.arvrunner.project_uuid) + else: + runtime_constraints["docker_image"] = arvados_jobs_image(self.arvrunner) resources = self.builder.resources if resources is not None: @@ -83,6 +97,16 @@ class ArvadosJob(object): runtime_constraints["min_ram_mb_per_node"] = resources.get("ram") runtime_constraints["min_scratch_mb_per_node"] = resources.get("tmpdirSize", 0) + resources.get("outdirSize", 0) + runtime_req, _ = get_feature(self, "http://arvados.org/cwl#RuntimeConstraints") + if runtime_req: + if "keep_cache" in runtime_req: + runtime_constraints["keep_cache_mb_per_task"] = runtime_req["keep_cache"] + if "outputDirType" in runtime_req: + if runtime_req["outputDirType"] == "local_output_dir": + script_parameters["task.keepTmpOutput"] = False + elif runtime_req["outputDirType"] == "keep_output_dir": + script_parameters["task.keepTmpOutput"] = True + filters = [["repository", "=", "arvados"], ["script", "=", "crunchrunner"], ["script_version", "in git", "9e5b98e8f5f4727856b53447191f9c06e3da2ba6"]] @@ -90,19 +114,20 @@ class ArvadosJob(object): filters.append(["docker_image_locator", "in docker", runtime_constraints["docker_image"]]) try: - response = self.arvrunner.api.jobs().create( - body={ - "owner_uuid": self.arvrunner.project_uuid, - "script": "crunchrunner", - "repository": "arvados", - "script_version": "9570-cwl-v1.0", - "minimum_script_version": "9e5b98e8f5f4727856b53447191f9c06e3da2ba6", - "script_parameters": {"tasks": [script_parameters]}, - "runtime_constraints": runtime_constraints - }, - filters=filters, - find_or_create=kwargs.get("enable_reuse", True) - ).execute(num_retries=self.arvrunner.num_retries) + with Perf(metrics, "create %s" % self.name): + response = self.arvrunner.api.jobs().create( + body={ + "owner_uuid": self.arvrunner.project_uuid, + "script": "crunchrunner", + "repository": "arvados", + "script_version": "master", + "minimum_script_version": "9e5b98e8f5f4727856b53447191f9c06e3da2ba6", + "script_parameters": {"tasks": [script_parameters]}, + "runtime_constraints": runtime_constraints + }, + filters=filters, + find_or_create=kwargs.get("enable_reuse", True) + ).execute(num_retries=self.arvrunner.num_retries) self.arvrunner.processes[response["uuid"]] = self @@ -111,7 +136,8 @@ class ArvadosJob(object): logger.info("Job %s (%s) is %s", self.name, response["uuid"], response["state"]) if response["state"] in ("Complete", "Failed", "Cancelled"): - self.done(response) + with Perf(metrics, "done %s" % self.name): + self.done(response) except Exception as e: logger.error("Got error %s" % str(e)) self.output_callback({}, "permanentFail") @@ -119,7 +145,8 @@ class ArvadosJob(object): def update_pipeline_component(self, record): if 
@@ -119,7 +145,8 @@
     def update_pipeline_component(self, record):
         if self.arvrunner.pipeline:
             self.arvrunner.pipeline["components"][self.name] = {"job": record}
-            self.arvrunner.pipeline = self.arvrunner.api.pipeline_instances().update(uuid=self.arvrunner.pipeline["uuid"],
+            with Perf(metrics, "update_pipeline_component %s" % self.name):
+                self.arvrunner.pipeline = self.arvrunner.api.pipeline_instances().update(uuid=self.arvrunner.pipeline["uuid"],
                                                                                     body={
                                                                                         "components": self.arvrunner.pipeline["components"]
                                                                                     }).execute(num_retries=self.arvrunner.num_retries)
@@ -151,37 +178,39 @@ class ArvadosJob(object):
         outputs = {}
         try:
             if record["output"]:
-                logc = arvados.collection.Collection(record["log"])
-                log = logc.open(logc.keys()[0])
-                tmpdir = None
-                outdir = None
-                keepdir = None
-                for l in log:
-                    # Determine the tmpdir, outdir and keepdir paths from
-                    # the job run. Unfortunately, we can't take the first
-                    # values we find (which are expected to be near the
-                    # top) and stop scanning because if the node fails and
-                    # the job restarts on a different node these values
-                    # will different runs, and we need to know about the
-                    # final run that actually produced output.
-
-                    g = tmpdirre.match(l)
-                    if g:
-                        tmpdir = g.group(1)
-                    g = outdirre.match(l)
-                    if g:
-                        outdir = g.group(1)
-                    g = keepre.match(l)
-                    if g:
-                        keepdir = g.group(1)
-
-                outputs = done.done(self, record, tmpdir, outdir, keepdir)
+                with Perf(metrics, "inspect log %s" % self.name):
+                    logc = arvados.collection.CollectionReader(record["log"],
+                                                               api_client=self.arvrunner.api,
+                                                               keep_client=self.arvrunner.keep_client,
+                                                               num_retries=self.arvrunner.num_retries)
+                    log = logc.open(logc.keys()[0])
+                    dirs = {}
+                    for l in log:
+                        # Determine the tmpdir, outdir and keepdir paths from
+                        # the job run. Unfortunately, we can't take the first
+                        # values we find (which are expected to be near the
+                        # top) and stop scanning, because if the node fails
+                        # and the job restarts on a different node these
+                        # values will differ between runs, and we need to know
+                        # about the final run that actually produced output.
+                        g = crunchrunner_re.match(l)
+                        if g:
+                            dirs[g.group(1)] = g.group(2)
+
+                with Perf(metrics, "output collection %s" % self.name):
+                    outputs = done.done(self, record, dirs.get("tmpdir"),
+                                        dirs.get("outdir"), dirs.get("keep"))
         except WorkflowException as e:
             logger.error("Error while collecting job outputs:\n%s", e, exc_info=(e if self.arvrunner.debug else False))
             processStatus = "permanentFail"
+            outputs = None
         except Exception as e:
             logger.exception("Got unknown exception while collecting job outputs:")
             processStatus = "permanentFail"
+            outputs = None
 
             self.output_callback(outputs, processStatus)
         finally:
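
The rewritten log scan collapses the three old regexes into the single crunchrunner_re
defined at the top of the file, keyed by the captured variable name. A quick demonstration
against a made-up but representative log line (the leading timestamp, job UUID, PID and
task-sequence fields are assumptions about the crunch log prefix; only the crunchrunner
suffix matters to the regex):

    import re

    crunchrunner_re = re.compile(
        r"^\S+ \S+ \d+ \d+ stderr \S+ \S+ crunchrunner: \$\(task\.(tmpdir|outdir|keep)\)=(.*)")

    line = ("2016-11-02_18:24:07 zzzzz-8i9sb-zzzzzzzzzzzzzzz 14086 0 stderr task 0 "
            "crunchrunner: $(task.tmpdir)=/tmp/crunch-job-task-work/compute1.1/tmpdir")

    dirs = {}
    g = crunchrunner_re.match(line)
    if g:
        # group(1) is the variable name, group(2) its value.
        dirs[g.group(1)] = g.group(2)
    # dirs == {'tmpdir': '/tmp/crunch-job-task-work/compute1.1/tmpdir'}
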
+ if "job_order" in self.job_order: + del self.job_order["job_order"] + + self.job_order["cwl:tool"] = workflowmapper.mapper(self.tool.tool["id"]).target[5:] + + if self.output_name: + self.job_order["arv:output_name"] = self.output_name + + self.job_order["arv:enable_reuse"] = self.enable_reuse + return { "script": "cwl-runner", - "script_version": "master", + "script_version": __version__, "repository": "arvados", "script_parameters": self.job_order, "runtime_constraints": { - "docker_image": "arvados/jobs" + "docker_image": arvados_jobs_image(self.arvrunner) } } def run(self, *args, **kwargs): job_spec = self.arvados_job_spec(*args, **kwargs) + job_spec.setdefault("owner_uuid", self.arvrunner.project_uuid) - response = self.arvrunner.api.jobs().create( + job = self.arvrunner.api.jobs().create( body=job_spec, find_or_create=self.enable_reuse ).execute(num_retries=self.arvrunner.num_retries) - self.uuid = response["uuid"] + for k,v in job_spec["script_parameters"].items(): + if v is False or v is None or isinstance(v, dict): + job_spec["script_parameters"][k] = {"value": v} + + del job_spec["owner_uuid"] + job_spec["job"] = job + self.arvrunner.pipeline = self.arvrunner.api.pipeline_instances().create( + body={ + "owner_uuid": self.arvrunner.project_uuid, + "name": shortname(self.tool.tool["id"]), + "components": {"cwl-runner": job_spec }, + "state": "RunningOnServer"}).execute(num_retries=self.arvrunner.num_retries) + logger.info("Created pipeline %s", self.arvrunner.pipeline["uuid"]) + + if kwargs.get("wait") is False: + self.uuid = self.arvrunner.pipeline["uuid"] + return + + self.uuid = job["uuid"] self.arvrunner.processes[self.uuid] = self - logger.info("Submitted job %s", response["uuid"]) - - if kwargs.get("submit"): - self.pipeline = self.arvrunner.api.pipeline_instances().create( - body={ - "owner_uuid": self.arvrunner.project_uuid, - "name": shortname(self.tool.tool["id"]), - "components": {"cwl-runner": {"job": {"uuid": self.uuid, "state": response["state"]} } }, - "state": "RunningOnClient"}).execute(num_retries=self.arvrunner.num_retries) - - if response["state"] in ("Complete", "Failed", "Cancelled"): - self.done(response) + if job["state"] in ("Complete", "Failed", "Cancelled"): + self.done(job) class RunnerTemplate(object): @@ -244,6 +294,7 @@ class RunnerTemplate(object): type_to_dataclass = { 'boolean': 'boolean', 'File': 'File', + 'Directory': 'Collection', 'float': 'number', 'int': 'number', 'string': 'text', @@ -256,7 +307,8 @@ class RunnerTemplate(object): runner=runner, tool=tool, job_order=job_order, - enable_reuse=enable_reuse) + enable_reuse=enable_reuse, + output_name=None) def pipeline_component_spec(self): """Return a component that Workbench and a-r-p-i will understand. @@ -264,6 +316,7 @@ class RunnerTemplate(object): Specifically, translate CWL input specs to Arvados pipeline format, like {"dataclass":"File","value":"xyz"}. """ + spec = self.job.arvados_job_spec() # Most of the component spec is exactly the same as the job @@ -297,7 +350,7 @@ class RunnerTemplate(object): # Title and description... 
@@ -244,6 +294,7 @@ class RunnerTemplate(object):
     type_to_dataclass = {
         'boolean': 'boolean',
         'File': 'File',
+        'Directory': 'Collection',
         'float': 'number',
         'int': 'number',
         'string': 'text',
@@ -256,7 +307,8 @@ class RunnerTemplate(object):
             runner=runner,
             tool=tool,
             job_order=job_order,
-            enable_reuse=enable_reuse)
+            enable_reuse=enable_reuse,
+            output_name=None)
 
     def pipeline_component_spec(self):
         """Return a component that Workbench and a-r-p-i will understand.
@@ -264,6 +316,7 @@ class RunnerTemplate(object):
         Specifically, translate CWL input specs to Arvados pipeline format,
         like {"dataclass":"File","value":"xyz"}.
         """
+
         spec = self.job.arvados_job_spec()
 
         # Most of the component spec is exactly the same as the job
@@ -297,7 +350,7 @@ class RunnerTemplate(object):
 
             # Title and description...
             title = param.pop('label', '')
-            descr = param.pop('description', '').rstrip('\n')
+            descr = param.pop('doc', '').rstrip('\n')
             if title:
                 param['title'] = title
             if descr:
@@ -310,8 +363,8 @@ class RunnerTemplate(object):
                 pass
             elif not isinstance(value, dict):
                 param['value'] = value
-            elif param.get('dataclass') == 'File' and value.get('path'):
-                param['value'] = value['path']
+            elif param.get('dataclass') in ('File', 'Collection') and value.get('location'):
+                param['value'] = value['location'][5:]
 
             spec['script_parameters'][param_id] = param
 
         spec['script_parameters']['cwl:tool'] = job_params['cwl:tool']
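
Both the new workflowmapper.mapper(...).target[5:] in arvados_job_spec() and
value['location'][5:] here rely on the mapped location carrying CWL's five-character
"keep:" URI scheme; slicing it off leaves the bare portable-data-hash path that Workbench
and a-r-p-i expect. For example (hash invented):

    location = "keep:99999999999999999999999999999999+118/input.txt"
    # Strip the 5-character "keep:" scheme prefix.
    assert location[5:] == "99999999999999999999999999999999+118/input.txt"
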