X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/803fbc7e350dfa6b70ac63c7bf5f9046a0625155..d6cccb3ea4e5f076a436d9935e3835d4b620b859:/sdk/cwl/arvados_cwl/arvjob.py

diff --git a/sdk/cwl/arvados_cwl/arvjob.py b/sdk/cwl/arvados_cwl/arvjob.py
index 9a03372d32..bd8ab1137e 100644
--- a/sdk/cwl/arvados_cwl/arvjob.py
+++ b/sdk/cwl/arvados_cwl/arvjob.py
@@ -1,3 +1,5 @@
+from past.builtins import basestring
+from builtins import object
 # Copyright (C) The Arvados Authors. All rights reserved.
 #
 # SPDX-License-Identifier: Apache-2.0
@@ -67,11 +69,11 @@ class ArvadosJob(JobBase):
                                                     keep_client=self.arvrunner.keep_client,
                                                     num_retries=self.arvrunner.num_retries)
                 script_parameters["task.vwd"] = {}
-                generatemapper = VwdPathMapper([self.generatefiles], "", "",
+                generatemapper = VwdPathMapper(self.generatefiles["listing"], "", "",
                                                separateDirs=False)
 
                 with Perf(metrics, "createfiles %s" % self.name):
-                    for f, p in generatemapper.items():
+                    for f, p in list(generatemapper.items()):
                         if p.type == "CreateFile":
                             with vwd.open(p.target, "w") as n:
                                 n.write(p.resolved.encode("utf-8"))
@@ -85,7 +87,7 @@ class ArvadosJob(JobBase):
                                  trash_at=info["trash_at"],
                                  properties=info["properties"])
 
-                for f, p in generatemapper.items():
+                for f, p in list(generatemapper.items()):
                     if p.type == "File":
                         script_parameters["task.vwd"][p.target] = p.resolved
                     if p.type == "CreateFile":
@@ -243,7 +245,7 @@ class ArvadosJob(JobBase):
                                                            api_client=self.arvrunner.api,
                                                            keep_client=self.arvrunner.keep_client,
                                                            num_retries=self.arvrunner.num_retries)
-                log = logc.open(logc.keys()[0])
+                log = logc.open(list(logc.keys())[0])
                 dirs = {
                     "tmpdir": "/tmpdir",
                     "outdir": "/outdir",
@@ -296,10 +298,10 @@ class RunnerJob(Runner):
         a pipeline template or pipeline instance.
         """
 
-        if self.tool.tool["id"].startswith("keep:"):
-            self.job_order["cwl:tool"] = self.tool.tool["id"][5:]
+        if self.embedded_tool.tool["id"].startswith("keep:"):
+            self.job_order["cwl:tool"] = self.embedded_tool.tool["id"][5:]
         else:
-            packed = packed_workflow(self.arvrunner, self.tool, self.merged_map)
+            packed = packed_workflow(self.arvrunner, self.embedded_tool, self.merged_map)
             wf_pdh = upload_workflow_collection(self.arvrunner, self.name, packed)
             self.job_order["cwl:tool"] = "%s/workflow.cwl#main" % wf_pdh
 
@@ -343,7 +345,7 @@ class RunnerJob(Runner):
             find_or_create=self.enable_reuse
         ).execute(num_retries=self.arvrunner.num_retries)
 
-        for k,v in job_spec["script_parameters"].items():
+        for k,v in list(job_spec["script_parameters"].items()):
             if v is False or v is None or isinstance(v, dict):
                 job_spec["script_parameters"][k] = {"value": v}
 
@@ -386,19 +388,21 @@ class RunnerTemplate(object):
     }
 
     def __init__(self, runner, tool, job_order, enable_reuse, uuid,
-                 submit_runner_ram=0, name=None, merged_map=None):
+                 submit_runner_ram=0, name=None, merged_map=None,
+                 loadingContext=None):
         self.runner = runner
-        self.tool = tool
+        self.embedded_tool = tool
         self.job = RunnerJob(
             runner=runner,
             tool=tool,
-            job_order=job_order,
             enable_reuse=enable_reuse,
             output_name=None,
             output_tags=None,
             submit_runner_ram=submit_runner_ram,
             name=name,
-            merged_map=merged_map)
+            merged_map=merged_map,
+            loadingContext=loadingContext)
+        self.job.job_order = job_order
         self.uuid = uuid
 
     def pipeline_component_spec(self):
@@ -420,7 +424,7 @@ class RunnerTemplate(object):
         job_params = spec['script_parameters']
         spec['script_parameters'] = {}
 
-        for param in self.tool.tool['inputs']:
+        for param in self.embedded_tool.tool['inputs']:
             param = copy.deepcopy(param)
 
             # Data type and "required" flag...
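
Note on the two imports added at the top of the file: they come from the "future"/"past"
compatibility package used during Python 2-to-3 porting. Python 3 has no built-in
basestring, so "from past.builtins import basestring" keeps isinstance() string checks
working on both versions (on Python 3 it matches str and bytes; on Python 2 it is the
built-in basestring), while "from builtins import object" gives Python 2 classes
Python 3-style semantics and is a no-op on Python 3. A minimal sketch of the pattern
these imports preserve, assuming the "future" package is installed; is_stringy is a
hypothetical helper, not part of arvjob.py:

    from past.builtins import basestring  # str/bytes on Py3, str/unicode on Py2

    def is_stringy(value):
        # Hypothetical helper: the kind of isinstance() check that keeps
        # working unchanged on both Python versions with this import.
        return isinstance(value, basestring)

    print(is_stringy(u"keep:abc+123"))  # True on Python 2 and 3
    print(is_stringy(42))               # False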
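Most of the remaining list() wrappers follow the standard futurize transformation: on
Python 3, dict.keys() and dict.items() return lazy view objects rather than lists, so
indexing them (as in logc.keys()[0]) raises TypeError, and adding or removing keys while
iterating a live view raises RuntimeError. Futurize wraps such calls in list()
conservatively wherever the result is indexed or the dict may be mutated in the loop
body, even where (as with reassigning existing script_parameters keys) the size never
changes. An illustrative sketch of both failure modes and the fix, not project code:

    params = {"a": None, "b": False}

    # Py3 dict views are not subscriptable:
    # params.keys()[0] raises TypeError, so materialize first.
    first = list(params.keys())[0]

    # Changing the dict's size while iterating a live view raises
    # "RuntimeError: dictionary changed size during iteration":
    for k in list(params.keys()):  # snapshot, then mutate safely
        del params[k]

    assert first == "a" and params == {}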