#
# SPDX-License-Identifier: Apache-2.0
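+# Python 2/3 compatibility shims provided by the python-future project.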
+from past.builtins import basestring
+from builtins import object
+from future.utils import viewitems
+
import logging
import re
import copy
import json
-import datetime
import time
-from cwltool.process import get_feature, shortname, UnsupportedRequirement
+from cwltool.process import shortname, UnsupportedRequirement
from cwltool.errors import WorkflowException
from cwltool.command_line_tool import revmap_file, CommandLineTool
from cwltool.load_tool import fetch_document
from cwltool.builder import Builder
from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, visit_class
+from cwltool.job import JobBase
from schema_salad.sourceline import SourceLine
+import arvados_cwl.util
import ruamel.yaml as yaml
import arvados.collection
from .perf import Perf
from . import done
from ._version import __version__
+from .util import get_intermediate_collection_info
logger = logging.getLogger('arvados.cwl-runner')
metrics = logging.getLogger('arvados.cwl-runner.metrics')
crunchrunner_git_commit = 'a3f2cb186e437bfce0031b024b2157b73ed2717d'
-class ArvadosJob(object):
+class ArvadosJob(JobBase):
"""Submit and manage a Crunch job for executing a CWL CommandLineTool."""
- def __init__(self, runner):
+ def __init__(self, runner,
+ builder, # type: Builder
+ joborder, # type: Dict[Text, Union[Dict[Text, Any], List, Text]]
+ make_path_mapper, # type: Callable[..., PathMapper]
+ requirements, # type: List[Dict[Text, Text]]
+ hints, # type: List[Dict[Text, Text]]
+ name # type: Text
+ ):
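+        # Initialize the shared cwltool job state (builder, path mapper,
+        # requirements, hints) via the JobBase constructor.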
+ super(ArvadosJob, self).__init__(builder, joborder, make_path_mapper, requirements, hints, name)
self.arvrunner = runner
self.running = False
self.uuid = None
- def run(self, dry_run=False, pull_image=True, **kwargs):
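+    # cwltool's refactored executor API passes a single RuntimeContext object
+    # in place of the old dry_run/pull_image/**kwargs arguments.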
+ def run(self, runtimeContext):
script_parameters = {
"command": self.command_line
}
keep_client=self.arvrunner.keep_client,
num_retries=self.arvrunner.num_retries)
script_parameters["task.vwd"] = {}
- generatemapper = VwdPathMapper([self.generatefiles], "", "",
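+        # generatefiles is a CWL Directory record; stage the entries in its "listing".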
+ generatemapper = VwdPathMapper(self.generatefiles["listing"], "", "",
separateDirs=False)
with Perf(metrics, "createfiles %s" % self.name):
n.write(p.resolved.encode("utf-8"))
if vwd:
- trash_time = None
- if self.arvrunner.intermediate_output_ttl > 0:
- trash_time = datetime.datetime.now() + datetime.timedelta(seconds=self.arvrunner.intermediate_output_ttl)
-
- current_container_uuid = None
- try:
- current_container = self.arvrunner.api.containers().current().execute(num_retries=self.arvrunner.num_retries)
- current_container_uuid = current_container['uuid']
- except ApiError as e:
- # Status code 404 just means we're not running in a container.
- if e.resp.status != 404:
- logger.info("Getting current container: %s", e)
-
- props = {"type": "Intermediate",
- "container": current_container_uuid}
with Perf(metrics, "generatefiles.save_new %s" % self.name):
- vwd.save_new(name="Intermediate collection",
- ensure_unique_name=True,
- trash_at=trash_time,
- properties=props)
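+                # get_intermediate_collection_info() centralizes the name,
+                # trash_at, and properties logic that was previously inlined above.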
+ info = get_intermediate_collection_info(self.name, None, runtimeContext.intermediate_output_ttl)
+ vwd.save_new(name=info["name"],
+ owner_uuid=self.arvrunner.project_uuid,
+ ensure_unique_name=True,
+ trash_at=info["trash_at"],
+ properties=info["properties"])
for f, p in generatemapper.items():
if p.type == "File":
script_parameters["task.permanentFailCodes"] = self.permanentFailCodes
with Perf(metrics, "arv_docker_get_image %s" % self.name):
- (docker_req, docker_is_req) = get_feature(self, "DockerRequirement")
- if docker_req and kwargs.get("use_container") is not False:
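+            # get_feature() was removed from cwltool.process; Process.get_requirement()
+            # returns the same (requirement, is_required) tuple.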
+ (docker_req, docker_is_req) = self.get_requirement("DockerRequirement")
+ if docker_req and runtimeContext.use_container is not False:
if docker_req.get("dockerOutputDirectory"):
raise SourceLine(docker_req, "dockerOutputDirectory", UnsupportedRequirement).makeError(
"Option 'dockerOutputDirectory' of DockerRequirement not supported.")
- runtime_constraints["docker_image"] = arv_docker_get_image(self.arvrunner.api, docker_req, pull_image, self.arvrunner.project_uuid)
+ runtime_constraints["docker_image"] = arv_docker_get_image(self.arvrunner.api,
+ docker_req,
+ runtimeContext.pull_image,
+ self.arvrunner.project_uuid)
else:
runtime_constraints["docker_image"] = "arvados/jobs"
runtime_constraints["min_ram_mb_per_node"] = resources.get("ram")
runtime_constraints["min_scratch_mb_per_node"] = resources.get("tmpdirSize", 0) + resources.get("outdirSize", 0)
- runtime_req, _ = get_feature(self, "http://arvados.org/cwl#RuntimeConstraints")
+ runtime_req, _ = self.get_requirement("http://arvados.org/cwl#RuntimeConstraints")
if runtime_req:
if "keep_cache" in runtime_req:
runtime_constraints["keep_cache_mb_per_task"] = runtime_req["keep_cache"]
if not self.arvrunner.ignore_docker_for_reuse:
filters.append(["docker_image_locator", "in docker", runtime_constraints["docker_image"]])
- enable_reuse = kwargs.get("enable_reuse", True)
+ enable_reuse = runtimeContext.enable_reuse
if enable_reuse:
- reuse_req, _ = get_feature(self, "http://arvados.org/cwl#ReuseRequirement")
+ reuse_req, _ = self.get_requirement("http://arvados.org/cwl#ReuseRequirement")
if reuse_req:
enable_reuse = reuse_req["enableReuse"]
e)
else:
logger.info("%s %s is %s", self.arvrunner.label(self), response["uuid"], response["state"])
- except Exception as e:
+ except Exception:
logger.exception("%s error" % (self.arvrunner.label(self)))
self.output_callback({}, "permanentFail")
body={
"components": components
}).execute(num_retries=self.arvrunner.num_retries)
- except Exception as e:
- logger.info("Error adding to components: %s", e)
+ except Exception:
+ logger.exception("Error adding to components")
def done(self, record):
try:
api_client=self.arvrunner.api,
keep_client=self.arvrunner.keep_client,
num_retries=self.arvrunner.num_retries)
- log = logc.open(logc.keys()[0])
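+            # Python 3: keys() returns a view, which does not support indexing.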
+ log = logc.open(list(logc.keys())[0])
dirs = {
"tmpdir": "/tmpdir",
"outdir": "/outdir",
dirs[g.group(1)] = g.group(2)
if processStatus == "permanentFail":
- done.logtail(logc, logger, "%s error log:" % self.arvrunner.label(self))
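+            # logtail() now takes a logging function (here logger.error), and
+            # maxlen caps the number of log lines reported.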
+ done.logtail(logc, logger.error, "%s (%s) error log:" % (self.arvrunner.label(self), record["uuid"]), maxlen=40)
with Perf(metrics, "output collection %s" % self.name):
outputs = done.done(self, record, dirs["tmpdir"],
dirs["outdir"], dirs["keep"])
except WorkflowException as e:
+ # Only include a stack trace if in debug mode.
+            # This is most likely a user workflow error and a stack trace may obscure more useful output.
logger.error("%s unable to collect output from %s:\n%s",
self.arvrunner.label(self), record["output"], e, exc_info=(e if self.arvrunner.debug else False))
processStatus = "permanentFail"
- except Exception as e:
+ except Exception:
logger.exception("Got unknown exception while collecting output for job %s:", self.name)
processStatus = "permanentFail"
class RunnerJob(Runner):
"""Submit and manage a Crunch job that runs crunch_scripts/cwl-runner."""
- def arvados_job_spec(self, dry_run=False, pull_image=True, **kwargs):
+ def arvados_job_spec(self, debug=False):
"""Create an Arvados job specification for this workflow.

        The returned dict can be used to create a job (i.e., passed as
        the +body+ argument to jobs().create()), or as part of a
        pipeline template or pipeline instance.
"""
- if self.tool.tool["id"].startswith("keep:"):
- self.job_order["cwl:tool"] = self.tool.tool["id"][5:]
+ if self.embedded_tool.tool["id"].startswith("keep:"):
+ self.job_order["cwl:tool"] = self.embedded_tool.tool["id"][5:]
else:
- packed = packed_workflow(self.arvrunner, self.tool, self.merged_map)
+ packed = packed_workflow(self.arvrunner, self.embedded_tool, self.merged_map)
wf_pdh = upload_workflow_collection(self.arvrunner, self.name, packed)
self.job_order["cwl:tool"] = "%s/workflow.cwl#main" % wf_pdh
if self.on_error:
self.job_order["arv:on_error"] = self.on_error
- if kwargs.get("debug"):
+ if debug:
self.job_order["arv:debug"] = True
return {
}
}
- def run(self, **kwargs):
- job_spec = self.arvados_job_spec(**kwargs)
+ def run(self, runtimeContext):
+ job_spec = self.arvados_job_spec(runtimeContext.debug)
job_spec.setdefault("owner_uuid", self.arvrunner.project_uuid)
find_or_create=self.enable_reuse
).execute(num_retries=self.arvrunner.num_retries)
- for k,v in job_spec["script_parameters"].items():
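+        # viewitems() from future.utils maps to dict.items() on Python 3 and
+        # avoids building an intermediate list on Python 2.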
+        for k, v in viewitems(job_spec["script_parameters"]):
if v is False or v is None or isinstance(v, dict):
job_spec["script_parameters"][k] = {"value": v}
body=instance_spec).execute(num_retries=self.arvrunner.num_retries)
logger.info("Created pipeline %s", self.arvrunner.pipeline["uuid"])
- if kwargs.get("wait") is False:
+ if runtimeContext.wait is False:
self.uuid = self.arvrunner.pipeline["uuid"]
return
}
def __init__(self, runner, tool, job_order, enable_reuse, uuid,
- submit_runner_ram=0, name=None, merged_map=None):
+ submit_runner_ram=0, name=None, merged_map=None,
+ loadingContext=None):
self.runner = runner
- self.tool = tool
+ self.embedded_tool = tool
self.job = RunnerJob(
runner=runner,
tool=tool,
- job_order=job_order,
enable_reuse=enable_reuse,
output_name=None,
output_tags=None,
submit_runner_ram=submit_runner_ram,
name=name,
- merged_map=merged_map)
+ merged_map=merged_map,
+ loadingContext=loadingContext)
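+        # The updated Runner constructor no longer accepts job_order (removed
+        # from the call above), so it is assigned on the job afterwards.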
+ self.job.job_order = job_order
self.uuid = uuid
def pipeline_component_spec(self):
job_params = spec['script_parameters']
spec['script_parameters'] = {}
- for param in self.tool.tool['inputs']:
+ for param in self.embedded_tool.tool['inputs']:
param = copy.deepcopy(param)
# Data type and "required" flag...