import json
import time
-from cwltool.process import get_feature, shortname
+from cwltool.process import get_feature, shortname, UnsupportedRequirement
from cwltool.errors import WorkflowException
from cwltool.draft2tool import revmap_file, CommandLineTool
from cwltool.load_tool import fetch_document
from cwltool.builder import Builder
+from cwltool.pathmapper import adjustFileObjs, adjustDirObjs
+
+from schema_salad.sourceline import SourceLine
+
+import ruamel.yaml as yaml
import arvados.collection
from .arvdocker import arv_docker_get_image
-from .runner import Runner, arvados_jobs_image
-from .pathmapper import InitialWorkDirPathMapper
+from .runner import Runner, arvados_jobs_image, packed_workflow, upload_workflow_collection, trim_anonymous_location
+from .pathmapper import VwdPathMapper, trim_listing
from .perf import Perf
from . import done
from ._version import __version__
@@ ... @@
                                                    keep_client=self.arvrunner.keep_client,
                                                    num_retries=self.arvrunner.num_retries)
                script_parameters["task.vwd"] = {}
-                generatemapper = InitialWorkDirPathMapper([self.generatefiles], "", "",
-                                                          separateDirs=False)
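+                # VwdPathMapper walks the InitialWorkDir listing so its
+                # entries can be staged into the vwd collection below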
+                generatemapper = VwdPathMapper([self.generatefiles], "", "",
+                                               separateDirs=False)
with Perf(metrics, "createfiles %s" % self.name):
for f, p in generatemapper.items():
with vwd.open(p.target, "w") as n:
n.write(p.resolved.encode("utf-8"))
- with Perf(metrics, "generatefiles.save_new %s" % self.name):
- vwd.save_new()
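+                # an empty Collection is falsy, so the Keep write is
+                # skipped when no files were generated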
+                if vwd:
+                    with Perf(metrics, "generatefiles.save_new %s" % self.name):
+                        vwd.save_new()
                for f, p in generatemapper.items():
                    if p.type == "File":
@@ ... @@
        if runtime_req:
            if "keep_cache" in runtime_req:
                runtime_constraints["keep_cache_mb_per_task"] = runtime_req["keep_cache"]
+ runtime_constraints["min_ram_mb_per_node"] += runtime_req["keep_cache"]
if "outputDirType" in runtime_req:
if runtime_req["outputDirType"] == "local_output_dir":
script_parameters["task.keepTmpOutput"] = False
@@ ... @@
            self.update_pipeline_component(response)
-            logger.info("%s %s is %s", self.arvrunner.label(self), response["uuid"], response["state"])
-
-            if response["state"] in ("Complete", "Failed", "Cancelled"):
+ if response["state"] == "Complete":
+ logger.info("%s reused job %s", self.arvrunner.label(self), response["uuid"])
with Perf(metrics, "done %s" % self.name):
self.done(response)
+            else:
+                logger.info("%s %s is %s", self.arvrunner.label(self), response["uuid"], response["state"])
        except Exception as e:
            logger.exception("%s error" % (self.arvrunner.label(self)))
            self.output_callback({}, "permanentFail")
@@ ... @@
        a pipeline template or pipeline instance.
        """
-        workflowmapper = super(RunnerJob, self).arvados_job_spec(dry_run=dry_run, pull_image=pull_image, **kwargs)
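+        # a workflow already stored in Keep can be run in place; anything
+        # else is packed into a single document and uploaded as a collection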
+ if self.tool.tool["id"].startswith("keep:"):
+ self.job_order["cwl:tool"] = self.tool.tool["id"][5:]
+ else:
+ packed = packed_workflow(self.arvrunner, self.tool)
+ wf_pdh = upload_workflow_collection(self.arvrunner, self.name, packed)
+ self.job_order["cwl:tool"] = "%s/workflow.cwl#main" % wf_pdh
- self.job_order["cwl:tool"] = workflowmapper.mapper(self.tool.tool["id"]).target[5:]
+        adjustDirObjs(self.job_order, trim_listing)
+        adjustFileObjs(self.job_order, trim_anonymous_location)
+        adjustDirObjs(self.job_order, trim_anonymous_location)
        if self.output_name:
            self.job_order["arv:output_name"] = self.output_name
@@ ... @@
            if not isinstance(types, list):
                types = [types]
            param['required'] = 'null' not in types
-            non_null_types = set(types) - set(['null'])
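+            # a type entry may be a schema dict (e.g. enum or array), which
+            # is unhashable, so filter with a comprehension instead of set()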
+            non_null_types = [t for t in types if t != "null"]
            if len(non_null_types) == 1:
                the_type = [c for c in non_null_types][0]
-                dataclass = self.type_to_dataclass.get(the_type)
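+                # only a plain type name has a dataclass mapping; schema
+                # dicts fall through with dataclass = None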
+                dataclass = None
+                if isinstance(the_type, basestring):
+                    dataclass = self.type_to_dataclass.get(the_type)
                if dataclass:
                    param['dataclass'] = dataclass
            # Note: If we didn't figure out a single appropriate