def check_writable(self, obj):
    """Recursively reject InitialWorkDir entries marked writable.

    Walks *obj* (arbitrarily nested dicts and lists) and raises
    UnsupportedRequirement if any dict carries a truthy "writable"
    key.  Non-container values are ignored; returns None otherwise.
    """
    if isinstance(obj, dict):
        if obj.get("writable"):
            raise UnsupportedRequirement("InitialWorkDir feature 'writable: true' not supported")
        # dict.values(), not itervalues(): itervalues() does not exist
        # on Python 3, while values() works on both 2 and 3.
        for v in obj.values():
            self.check_writable(v)
    if isinstance(obj, list):
        for v in obj:
            self.check_writable(v)
+
def make_output_collection(self, name, outputObj):
    """Copy the files referenced by *outputObj* into a new Arvados
    collection and return ``(outputObj, collection)``.

    The incoming outputObj is deep-copied, so the caller's object is
    untouched.  Locations in the copy are rewritten twice: first to
    collection-relative paths (that version is saved as
    ``cwl.output.json`` inside the collection), then to absolute
    ``keep:`` URIs in the returned object.

    Raises Exception for an output source that is neither a keep
    reference nor a literal, and re-raises ArgumentError from
    CollectionReader construction after logging it.
    """
    outputObj = copy.deepcopy(outputObj)

    # Collect every File and Directory object present in the output.
    files = []
    def capture(fileobj):
        files.append(fileobj)

    adjustDirObjs(outputObj, capture)
    adjustFileObjs(outputObj, capture)

    # Map each source location to its target path in the new collection.
    generatemapper = FinalOutputPathMapper(files, "", "", separateDirs=False)

    final = arvados.collection.Collection(api_client=self.api,
                                          keep_client=self.keep_client,
                                          num_retries=self.num_retries)

    # Cache of CollectionReader objects keyed by source collection id,
    # so each source collection is opened only once.
    srccollections = {}
    for k, v in generatemapper.items():
        if k.startswith("_:"):
            # Literals: directories need no copying; file literals are
            # written directly from their resolved contents.
            if v.type == "Directory":
                continue
            if v.type == "CreateFile":
                with final.open(v.target, "wb") as f:
                    f.write(v.resolved.encode("utf-8"))
                continue

        if not k.startswith("keep:"):
            raise Exception("Output source is not in keep or a literal")
        sp = k.split("/")
        srccollection = sp[0][5:]  # strip the "keep:" prefix
        if srccollection not in srccollections:
            try:
                srccollections[srccollection] = arvados.collection.CollectionReader(
                    srccollection,
                    api_client=self.api,
                    keep_client=self.keep_client,
                    num_retries=self.num_retries)
            except arvados.errors.ArgumentError as e:
                logger.error("Creating CollectionReader for '%s' '%s': %s", k, v, e)
                raise
        reader = srccollections[srccollection]
        try:
            srcpath = "/".join(sp[1:]) if len(sp) > 1 else "."
            final.copy(srcpath, v.target, source_collection=reader, overwrite=False)
        except IOError as e:
            # Best effort: a copy failure (e.g. target already present)
            # is logged but not fatal.  logger.warning, not the
            # deprecated logger.warn alias.
            logger.warning("While preparing output collection: %s", e)

    # Rewrite locations to collection-relative targets and drop fields
    # that would be stale in the new collection.
    def rewrite(fileobj):
        fileobj["location"] = generatemapper.mapper(fileobj["location"]).target
        for k in ("basename", "listing", "contents"):
            if k in fileobj:
                del fileobj[k]

    adjustDirObjs(outputObj, rewrite)
    adjustFileObjs(outputObj, rewrite)

    with final.open("cwl.output.json", "w") as f:
        json.dump(outputObj, f, sort_keys=True, indent=4, separators=(',',': '))

    final.save_new(name=name, owner_uuid=self.project_uuid, ensure_unique_name=True)

    logger.info("Final output collection %s \"%s\" (%s)", final.portable_data_hash(),
                final.api_response()["name"],
                final.manifest_locator())

    # Final pass: make every location an absolute keep: reference into
    # the saved collection.
    def finalcollection(fileobj):
        fileobj["location"] = "keep:%s/%s" % (final.portable_data_hash(), fileobj["location"])

    adjustDirObjs(outputObj, finalcollection)
    adjustFileObjs(outputObj, finalcollection)

    return (outputObj, final)
+
def set_crunch_output(self):
    """Record the final output collection on the running crunch
    container (containers API) or the current job task (jobs API).

    A failure to update the container record is logged and swallowed;
    with the jobs API the task is only updated when TASK_UUID is set
    in the environment.
    """
    if self.work_api == "containers":
        try:
            current = self.api.containers().current().execute(
                num_retries=self.num_retries)
            body = {
                'output': self.final_output_collection.portable_data_hash(),
            }
            self.api.containers().update(
                uuid=current['uuid'],
                body=body,
            ).execute(num_retries=self.num_retries)
        except Exception as e:
            # Best effort: recording the output must not kill the run.
            logger.info("Setting container output: %s", e)
    elif self.work_api == "jobs" and "TASK_UUID" in os.environ:
        body = {
            'output': self.final_output_collection.portable_data_hash(),
            'success': self.final_status == "success",
            'progress': 1.0,
        }
        self.api.job_tasks().update(
            uuid=os.environ["TASK_UUID"],
            body=body,
        ).execute(num_retries=self.num_retries)
+
+ def arv_executor(self, tool, job_order, **kwargs):