X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/4b4a0917a967c0ec2dd7b72c9665e0859022f120..1166aeb6033725709ded753a0c00f69320a9a873:/sdk/cwl/arvados_cwl/arvjob.py

diff --git a/sdk/cwl/arvados_cwl/arvjob.py b/sdk/cwl/arvados_cwl/arvjob.py
index 25f64ea230..11efc0c1c3 100644
--- a/sdk/cwl/arvados_cwl/arvjob.py
+++ b/sdk/cwl/arvados_cwl/arvjob.py
@@ -2,21 +2,27 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
+from past.builtins import basestring
+from builtins import object
+from future.utils import viewitems
+
 import logging
 import re
 import copy
 import json
 import time
 
-from cwltool.process import get_feature, shortname, UnsupportedRequirement
+from cwltool.process import shortname, UnsupportedRequirement
 from cwltool.errors import WorkflowException
-from cwltool.draft2tool import revmap_file, CommandLineTool
+from cwltool.command_line_tool import revmap_file, CommandLineTool
 from cwltool.load_tool import fetch_document
 from cwltool.builder import Builder
 from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, visit_class
+from cwltool.job import JobBase
 
 from schema_salad.sourceline import SourceLine
 
+import arvados_cwl.util
 import ruamel.yaml as yaml
 
 import arvados.collection
@@ -28,6 +34,7 @@ from .pathmapper import VwdPathMapper, trim_listing
 from .perf import Perf
 from . import done
 from ._version import __version__
+from .util import get_intermediate_collection_info
 
 logger = logging.getLogger('arvados.cwl-runner')
 metrics = logging.getLogger('arvados.cwl-runner.metrics')
@@ -36,15 +43,23 @@ crunchrunner_re = re.compile(r"^.*crunchrunner: \$\(task\.(tmpdir|outdir|keep)\)
 
 crunchrunner_git_commit = 'a3f2cb186e437bfce0031b024b2157b73ed2717d'
 
-class ArvadosJob(object):
+class ArvadosJob(JobBase):
     """Submit and manage a Crunch job for executing a CWL CommandLineTool."""
 
-    def __init__(self, runner):
+    def __init__(self, runner,
+                 builder,   # type: Builder
+                 joborder,  # type: Dict[Text, Union[Dict[Text, Any], List, Text]]
+                 make_path_mapper,  # type: Callable[..., PathMapper]
+                 requirements,      # type: List[Dict[Text, Text]]
+                 hints,             # type: List[Dict[Text, Text]]
+                 name               # type: Text
+    ):
+        super(ArvadosJob, self).__init__(builder, joborder, make_path_mapper, requirements, hints, name)
         self.arvrunner = runner
         self.running = False
         self.uuid = None
 
-    def run(self, dry_run=False, pull_image=True, **kwargs):
+    def run(self, runtimeContext):
         script_parameters = {
             "command": self.command_line
         }
@@ -56,7 +71,7 @@ class ArvadosJob(object):
                                                 keep_client=self.arvrunner.keep_client,
                                                 num_retries=self.arvrunner.num_retries)
             script_parameters["task.vwd"] = {}
-            generatemapper = VwdPathMapper([self.generatefiles], "", "",
+            generatemapper = VwdPathMapper(self.generatefiles["listing"], "", "",
                                            separateDirs=False)
 
             with Perf(metrics, "createfiles %s" % self.name):
@@ -67,7 +82,12 @@ class ArvadosJob(object):
 
             if vwd:
                 with Perf(metrics, "generatefiles.save_new %s" % self.name):
-                    vwd.save_new()
+                    info = get_intermediate_collection_info(self.name, None, runtimeContext.intermediate_output_ttl)
+                    vwd.save_new(name=info["name"],
+                                 owner_uuid=self.arvrunner.project_uuid,
+                                 ensure_unique_name=True,
+                                 trash_at=info["trash_at"],
+                                 properties=info["properties"])
 
             for f, p in generatemapper.items():
                 if p.type == "File":
@@ -96,12 +116,15 @@ class ArvadosJob(object):
             script_parameters["task.permanentFailCodes"] = self.permanentFailCodes
 
         with Perf(metrics, "arv_docker_get_image %s" % self.name):
-            (docker_req, docker_is_req) = get_feature(self, "DockerRequirement")
-            if docker_req and kwargs.get("use_container") is not False:
+            (docker_req, docker_is_req) = self.get_requirement("DockerRequirement")
+            if docker_req and runtimeContext.use_container is not False:
                 if docker_req.get("dockerOutputDirectory"):
                     raise SourceLine(docker_req, "dockerOutputDirectory", UnsupportedRequirement).makeError(
                         "Option 'dockerOutputDirectory' of DockerRequirement not supported.")
-                runtime_constraints["docker_image"] = arv_docker_get_image(self.arvrunner.api, docker_req, pull_image, self.arvrunner.project_uuid)
+                runtime_constraints["docker_image"] = arv_docker_get_image(self.arvrunner.api,
+                                                                           docker_req,
+                                                                           runtimeContext.pull_image,
+                                                                           self.arvrunner.project_uuid)
             else:
                 runtime_constraints["docker_image"] = "arvados/jobs"
 
@@ -111,7 +134,7 @@ class ArvadosJob(object):
             runtime_constraints["min_ram_mb_per_node"] = resources.get("ram")
             runtime_constraints["min_scratch_mb_per_node"] = resources.get("tmpdirSize", 0) + resources.get("outdirSize", 0)
 
-        runtime_req, _ = get_feature(self, "http://arvados.org/cwl#RuntimeConstraints")
+        runtime_req, _ = self.get_requirement("http://arvados.org/cwl#RuntimeConstraints")
         if runtime_req:
             if "keep_cache" in runtime_req:
                 runtime_constraints["keep_cache_mb_per_task"] = runtime_req["keep_cache"]
@@ -128,12 +151,14 @@ class ArvadosJob(object):
         if not self.arvrunner.ignore_docker_for_reuse:
             filters.append(["docker_image_locator", "in docker", runtime_constraints["docker_image"]])
 
-        enable_reuse = kwargs.get("enable_reuse", True)
+        enable_reuse = runtimeContext.enable_reuse
        if enable_reuse:
-            reuse_req, _ = get_feature(self, "http://arvados.org/cwl#ReuseRequirement")
+            reuse_req, _ = self.get_requirement("http://arvados.org/cwl#ReuseRequirement")
             if reuse_req:
                 enable_reuse = reuse_req["enableReuse"]
 
+        self.output_callback = self.arvrunner.get_wrapped_callback(self.output_callback)
+
         try:
             with Perf(metrics, "create %s" % self.name):
                 response = self.arvrunner.api.jobs().create(
@@ -150,7 +175,8 @@ class ArvadosJob(object):
                     find_or_create=enable_reuse
                 ).execute(num_retries=self.arvrunner.num_retries)
 
-            self.arvrunner.processes[response["uuid"]] = self
+            self.uuid = response["uuid"]
+            self.arvrunner.process_submitted(self)
 
             self.update_pipeline_component(response)
 
@@ -171,37 +197,35 @@ class ArvadosJob(object):
                         logger.info("Creating read permission on job %s: %s",
                                     response["uuid"],
                                     e)
-
-                with Perf(metrics, "done %s" % self.name):
-                    self.done(response)
             else:
                 logger.info("%s %s is %s", self.arvrunner.label(self), response["uuid"], response["state"])
-        except Exception as e:
+        except Exception:
             logger.exception("%s error" % (self.arvrunner.label(self)))
             self.output_callback({}, "permanentFail")
 
     def update_pipeline_component(self, record):
-        if self.arvrunner.pipeline:
-            self.arvrunner.pipeline["components"][self.name] = {"job": record}
-            with Perf(metrics, "update_pipeline_component %s" % self.name):
-                self.arvrunner.pipeline = self.arvrunner.api.pipeline_instances().update(
-                    uuid=self.arvrunner.pipeline["uuid"],
-                    body={
-                        "components": self.arvrunner.pipeline["components"]
-                    }).execute(num_retries=self.arvrunner.num_retries)
-        if self.arvrunner.uuid:
-            try:
-                job = self.arvrunner.api.jobs().get(uuid=self.arvrunner.uuid).execute()
-                if job:
-                    components = job["components"]
-                    components[self.name] = record["uuid"]
-                    self.arvrunner.api.jobs().update(
-                        uuid=self.arvrunner.uuid,
+        with self.arvrunner.workflow_eval_lock:
+            if self.arvrunner.pipeline:
+                self.arvrunner.pipeline["components"][self.name] = {"job": record}
+                with Perf(metrics, "update_pipeline_component %s" % self.name):
+                    self.arvrunner.pipeline = self.arvrunner.api.pipeline_instances().update(
+                        uuid=self.arvrunner.pipeline["uuid"],
                         body={
-                            "components": components
+                            "components": self.arvrunner.pipeline["components"]
                         }).execute(num_retries=self.arvrunner.num_retries)
-            except Exception as e:
-                logger.info("Error adding to components: %s", e)
+            if self.arvrunner.uuid:
+                try:
+                    job = self.arvrunner.api.jobs().get(uuid=self.arvrunner.uuid).execute()
+                    if job:
+                        components = job["components"]
+                        components[self.name] = record["uuid"]
+                        self.arvrunner.api.jobs().update(
+                            uuid=self.arvrunner.uuid,
+                            body={
+                                "components": components
+                            }).execute(num_retries=self.arvrunner.num_retries)
+                except Exception:
+                    logger.exception("Error adding to components")
 
     def done(self, record):
         try:
@@ -212,8 +236,11 @@ class ArvadosJob(object):
         try:
             if record["state"] == "Complete":
                 processStatus = "success"
+                # we don't have the real exit code so fake it.
+                record["exit_code"] = 0
             else:
                 processStatus = "permanentFail"
+                record["exit_code"] = 1
 
             outputs = {}
             try:
@@ -223,7 +250,7 @@ class ArvadosJob(object):
                                                                api_client=self.arvrunner.api,
                                                                keep_client=self.arvrunner.keep_client,
                                                                num_retries=self.arvrunner.num_retries)
-                    log = logc.open(logc.keys()[0])
+                    log = logc.open(list(logc.keys())[0])
                     dirs = {
                         "tmpdir": "/tmpdir",
                         "outdir": "/outdir",
@@ -242,16 +269,18 @@ class ArvadosJob(object):
                            dirs[g.group(1)] = g.group(2)
 
                     if processStatus == "permanentFail":
-                        done.logtail(logc, logger, "%s error log:" % self.arvrunner.label(self))
+                        done.logtail(logc, logger.error, "%s (%s) error log:" % (self.arvrunner.label(self), record["uuid"]), maxlen=40)
 
                 with Perf(metrics, "output collection %s" % self.name):
                     outputs = done.done(self, record, dirs["tmpdir"],
                                         dirs["outdir"], dirs["keep"])
             except WorkflowException as e:
+                # Only include a stack trace if in debug mode.
+                # This is most likely a user workflow error and a stack trace may obfuscate more useful output.
                 logger.error("%s unable to collect output from %s:\n%s",
                              self.arvrunner.label(self), record["output"], e,
                             exc_info=(e if self.arvrunner.debug else False))
                 processStatus = "permanentFail"
-            except Exception as e:
+            except Exception:
                 logger.exception("Got unknown exception while collecting output for job %s:", self.name)
                 processStatus = "permanentFail"
 
@@ -263,13 +292,12 @@ class ArvadosJob(object):
                 processStatus = "permanentFail"
         finally:
             self.output_callback(outputs, processStatus)
-            if record["uuid"] in self.arvrunner.processes:
-                del self.arvrunner.processes[record["uuid"]]
+
 
 class RunnerJob(Runner):
     """Submit and manage a Crunch job that runs crunch_scripts/cwl-runner."""
 
-    def arvados_job_spec(self, dry_run=False, pull_image=True, **kwargs):
+    def arvados_job_spec(self, debug=False):
         """Create an Arvados job specification for this workflow.
 
         The returned dict can be used to create a job (i.e., passed as
@@ -277,10 +305,10 @@ class RunnerJob(Runner):
         the +body+ argument to jobs().create()), or as part of a component in
         a pipeline template or pipeline instance.
         """
 
-        if self.tool.tool["id"].startswith("keep:"):
-            self.job_order["cwl:tool"] = self.tool.tool["id"][5:]
+        if self.embedded_tool.tool["id"].startswith("keep:"):
+            self.job_order["cwl:tool"] = self.embedded_tool.tool["id"][5:]
         else:
-            packed = packed_workflow(self.arvrunner, self.tool)
+            packed = packed_workflow(self.arvrunner, self.embedded_tool, self.merged_map)
             wf_pdh = upload_workflow_collection(self.arvrunner, self.name, packed)
             self.job_order["cwl:tool"] = "%s/workflow.cwl#main" % wf_pdh
 
@@ -299,7 +327,7 @@ class RunnerJob(Runner):
         if self.on_error:
             self.job_order["arv:on_error"] = self.on_error
 
-        if kwargs.get("debug"):
+        if debug:
             self.job_order["arv:debug"] = True
 
         return {
@@ -314,8 +342,8 @@ class RunnerJob(Runner):
             }
         }
 
-    def run(self, *args, **kwargs):
-        job_spec = self.arvados_job_spec(*args, **kwargs)
+    def run(self, runtimeContext):
+        job_spec = self.arvados_job_spec(runtimeContext.debug)
 
         job_spec.setdefault("owner_uuid", self.arvrunner.project_uuid)
 
@@ -324,7 +352,7 @@ class RunnerJob(Runner):
             find_or_create=self.enable_reuse
         ).execute(num_retries=self.arvrunner.num_retries)
 
-        for k,v in job_spec["script_parameters"].items():
+        for k,v in viewitems(job_spec["script_parameters"]):
            if v is False or v is None or isinstance(v, dict):
                 job_spec["script_parameters"][k] = {"value": v}
 
@@ -346,15 +374,12 @@ class RunnerJob(Runner):
                 body=instance_spec).execute(num_retries=self.arvrunner.num_retries)
             logger.info("Created pipeline %s", self.arvrunner.pipeline["uuid"])
 
-        if kwargs.get("wait") is False:
+        if runtimeContext.wait is False:
             self.uuid = self.arvrunner.pipeline["uuid"]
             return
 
         self.uuid = job["uuid"]
-        self.arvrunner.processes[self.uuid] = self
-
-        if job["state"] in ("Complete", "Failed", "Cancelled"):
-            self.done(job)
+        self.arvrunner.process_submitted(self)
 
 
 class RunnerTemplate(object):
@@ -370,18 +395,21 @@ class RunnerTemplate(object):
     }
 
     def __init__(self, runner, tool, job_order, enable_reuse, uuid,
-                 submit_runner_ram=0, name=None):
+                 submit_runner_ram=0, name=None, merged_map=None,
+                 loadingContext=None):
         self.runner = runner
-        self.tool = tool
+        self.embedded_tool = tool
         self.job = RunnerJob(
             runner=runner,
             tool=tool,
-            job_order=job_order,
             enable_reuse=enable_reuse,
             output_name=None,
             output_tags=None,
             submit_runner_ram=submit_runner_ram,
-            name=name)
+            name=name,
+            merged_map=merged_map,
+            loadingContext=loadingContext)
+        self.job.job_order = job_order
         self.uuid = uuid
 
     def pipeline_component_spec(self):
@@ -403,7 +431,7 @@ class RunnerTemplate(object):
         job_params = spec['script_parameters']
         spec['script_parameters'] = {}
 
-        for param in self.tool.tool['inputs']:
+        for param in self.embedded_tool.tool['inputs']:
             param = copy.deepcopy(param)
 
             # Data type and "required" flag...