with Perf(metrics, "arv_docker_get_image %s" % self.name):
(docker_req, docker_is_req) = get_feature(self, "DockerRequirement")
if docker_req and kwargs.get("use_container") is not False:
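+                # The jobs runtime can't honor a custom container output directory, so reject it up front.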
+                if docker_req.get("dockerOutputDirectory"):
+                    raise UnsupportedRequirement("Option 'dockerOutputDirectory' of DockerRequirement not supported.")
                runtime_constraints["docker_image"] = arv_docker_get_image(self.arvrunner.api, docker_req, pull_image, self.arvrunner.project_uuid)
            else:
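+                # No usable DockerRequirement (or containers disabled): run in the stock arvados/jobs image.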
runtime_constraints["docker_image"] = arvados_jobs_image(self.arvrunner)
with Perf(metrics, "done %s" % self.name):
self.done(response)
except Exception as e:
- logger.error("Got error %s" % str(e))
+ logger.exception("Job %s error" % (self.name))
            self.output_callback({}, "permanentFail")

    def update_pipeline_component(self, record):

                        outputs = done.done(self, record, dirs["tmpdir"],
                                            dirs["outdir"], dirs["keep"])
            except WorkflowException as e:
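+                # exc_info attaches the full traceback, but only when --debug is on.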
- logger.error("Error while collecting job outputs:\n%s", e, exc_info=(e if self.arvrunner.debug else False))
+ logger.error("Error while collecting output for job %s:\n%s", self.name, e, exc_info=(e if self.arvrunner.debug else False))
processStatus = "permanentFail"
- outputs = None
            except Exception as e:
-                logger.exception("Got unknown exception while collecting job outputs:")
+                logger.exception("Got unknown exception while collecting output for job %s:", self.name)
+                processStatus = "permanentFail"
+
+            # Note: currently output_callback expects an empty dict on error;
+            # anything else will fail.
+            if not isinstance(outputs, dict):
+                logger.error("Unexpected output type %s '%s'", type(outputs), outputs)
+                outputs = {}
                processStatus = "permanentFail"
-                outputs = None

            self.output_callback(outputs, processStatus)
        finally:

        if self.output_name:
            self.job_order["arv:output_name"] = self.output_name
-        if kwargs.get("output_tags"):
-            self.job_order["arv:output_tags"] = kwargs.get("output_tags")
+        if self.output_tags:
+            self.job_order["arv:output_tags"] = self.output_tags
        self.job_order["arv:enable_reuse"] = self.enable_reuse
"repository": "arvados",
"script_parameters": self.job_order,
"runtime_constraints": {
- "docker_image": arvados_jobs_image(self.arvrunner)
+ "docker_image": arvados_jobs_image(self.arvrunner),
+ "min_ram_mb_per_node": self.submit_runner_ram
}
}

        'string': 'text',
    }

-    def __init__(self, runner, tool, job_order, enable_reuse):
+    def __init__(self, runner, tool, job_order, enable_reuse, uuid, submit_runner_ram=0):
        self.runner = runner
        self.tool = tool
        self.job = RunnerJob(
            tool=tool,
            job_order=job_order,
            enable_reuse=enable_reuse,
-            output_name=None)
+            output_name=None,
+            output_tags=None,
+            submit_runner_ram=submit_runner_ram)
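+        # Keep the caller-supplied template uuid (if any) so save() can update it in place.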
+        self.uuid = uuid

    def pipeline_component_spec(self):
        """Return a component that Workbench and a-r-p-i will understand.
        return spec

    def save(self):
-        job_spec = self.pipeline_component_spec()
-        response = self.runner.api.pipeline_templates().create(body={
+        body = {
            "components": {
-                self.job.name: job_spec,
+                self.job.name: self.pipeline_component_spec(),
            },
            "name": self.job.name,
-            "owner_uuid": self.runner.project_uuid,
-        }, ensure_unique_name=True).execute(num_retries=self.runner.num_retries)
-        self.uuid = response["uuid"]
-        logger.info("Created template %s", self.uuid)
+        }
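+        # Only set owner_uuid when a project was given; the API otherwise assigns the default owner.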
+        if self.runner.project_uuid:
+            body["owner_uuid"] = self.runner.project_uuid
+        if self.uuid:
+            self.runner.api.pipeline_templates().update(
+                uuid=self.uuid, body=body).execute(
+                    num_retries=self.runner.num_retries)
+            logger.info("Updated template %s", self.uuid)
+        else:
+            self.uuid = self.runner.api.pipeline_templates().create(
+                body=body, ensure_unique_name=True).execute(
+                    num_retries=self.runner.num_retries)['uuid']
+            logger.info("Created template %s", self.uuid)