+ with final.open("cwl.output.json", "w") as f:
+ json.dump(outputObj, f, sort_keys=True, indent=4, separators=(',',': '))
+
+ final.save_new(name=name, owner_uuid=self.project_uuid, ensure_unique_name=True)
+
+ logger.info("Final output collection %s \"%s\" (%s)", final.portable_data_hash(),
+ final.api_response()["name"],
+ final.manifest_locator())
+
+ final_uuid = final.manifest_locator()
+ tags = tagsString.split(',')
+ for tag in tags:
+ self.api.links().create(body={"head_uuid": final_uuid, "link_class": "tag", "name": tag}).execute()
+
+ self.final_output_collection = final
+
def set_crunch_output(self):
    """Record the workflow's final output collection on this runner's own crunch record.

    Best-effort bookkeeping after the run finishes:

    * ``containers`` API: patch the ``output`` field of the container this
      runner is executing inside with the final collection's portable data
      hash.  Any failure (e.g. not actually running inside a container) is
      logged at info level and otherwise ignored.
    * ``jobs`` API: if ``TASK_UUID`` is present in the environment, mark the
      corresponding job task with the output hash, its success flag, and
      100% progress.

    Any other ``work_api`` value is a no-op.  Returns ``None``.
    """
    if self.work_api == "containers":
        try:
            # Ask the API server which container we are currently running
            # in, then write the final output hash onto that record.
            this_container = self.api.containers().current().execute(
                num_retries=self.num_retries)
            self.api.containers().update(
                uuid=this_container['uuid'],
                body={
                    'output': self.final_output_collection.portable_data_hash(),
                },
            ).execute(num_retries=self.num_retries)
        except Exception as e:
            # Deliberately swallow: failing to annotate our own container
            # record should not fail an otherwise-successful workflow.
            logger.info("Setting container output: %s", e)
    elif self.work_api == "jobs" and "TASK_UUID" in os.environ:
        self.api.job_tasks().update(
            uuid=os.environ["TASK_UUID"],
            body={
                'output': self.final_output_collection.portable_data_hash(),
                'success': self.final_status == "success",
                'progress': 1.0,
            },
        ).execute(num_retries=self.num_retries)
+
+ def arv_executor(self, tool, job_order, **kwargs):
+ self.debug = kwargs.get("debug")