"""
- def __init__(self, api_client, work_api=None, keep_client=None, output_name=None):
+ def __init__(self, api_client, work_api=None, keep_client=None, output_name=None, output_tags=None):
self.api = api_client
self.processes = {}
self.lock = threading.Lock()
self.pipeline = None
self.final_output_collection = None
self.output_name = output_name
+ self.output_tags = output_tags
self.project_uuid = None
if keep_client is not None:
for v in obj:
self.check_writable(v)
- def make_output_collection(self, name, outputObj):
+ def make_output_collection(self, name, tagsString, outputObj):
outputObj = copy.deepcopy(outputObj)
files = []
final.api_response()["name"],
final.manifest_locator())
+ final_uuid = final.manifest_locator()
+ tags = tagsString.split(',') if tagsString else []  # avoid creating an empty-name tag link when no tags were given
+ for tag in tags:
+ self.api.links().create(body={
+ "head_uuid": final_uuid, "link_class": "tag", "name": tag
+ }).execute(num_retries=self.num_retries)
+
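Each entry in the comma-separated tag string becomes an Arvados "tag" link whose head_uuid points at the finished output collection. As a minimal sketch of how those links could be read back afterwards with the Python SDK (the collection UUID below is a placeholder, and a configured ARVADOS_API_HOST/ARVADOS_API_TOKEN environment is assumed):

    import arvados

    api = arvados.api('v1')

    # Placeholder locator for the finished output collection.
    collection_uuid = "zzzzz-4zz18-xxxxxxxxxxxxxxx"

    # List the tag links that make_output_collection created for it.
    links = api.links().list(filters=[
        ["head_uuid", "=", collection_uuid],
        ["link_class", "=", "tag"],
    ]).execute()

    for link in links["items"]:
        print(link["name"])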
def finalcollection(fileobj):
fileobj["location"] = "keep:%s/%s" % (final.portable_data_hash(), fileobj["location"])
self.output_callback,
**kwargs).next()
else:
- runnerjob = RunnerContainer(self, tool, job_order, kwargs.get("enable_reuse"), self.output_name)
+ runnerjob = RunnerContainer(self, tool, job_order, kwargs.get("enable_reuse"), self.output_name, self.output_tags)
else:
- runnerjob = RunnerJob(self, tool, job_order, kwargs.get("enable_reuse"), self.output_name)
+ runnerjob = RunnerJob(self, tool, job_order, kwargs.get("enable_reuse"), self.output_name, self.output_tags)
if not kwargs.get("submit") and "cwl_runner_job" not in kwargs and self.work_api != "containers":
# Create pipeline for local run
else:
if self.output_name is None:
self.output_name = "Output of %s" % (shortname(tool.tool["id"]))
- self.final_output, self.final_output_collection = self.make_output_collection(self.output_name, self.final_output)
+ if self.output_tags is None:
+ self.output_tags = ""
+ self.final_output, self.final_output_collection = self.make_output_collection(self.output_name, self.output_tags, self.final_output)
self.set_crunch_output()
if self.final_status != "success":
parser.add_argument("--project-uuid", type=str, metavar="UUID", help="Project that will own the workflow jobs, if not provided, will go to home project.")
parser.add_argument("--output-name", type=str, help="Name to use for collection that stores the final output.", default=None)
+ parser.add_argument("--output-tags", type=str, help="Tags for the final output collection separated by commas, e.g., '--output-tags tag0,tag1,tag2'.", default=None)
parser.add_argument("--ignore-docker-for-reuse", action="store_true",
help="Ignore Docker image version when deciding whether to reuse past jobs.",
default=False)
try:
if api_client is None:
api_client=arvados.api('v1', model=OrderedJsonModel())
- runner = ArvCwlRunner(api_client, work_api=arvargs.work_api, keep_client=keep_client, output_name=arvargs.output_name)
+ runner = ArvCwlRunner(api_client, work_api=arvargs.work_api, keep_client=keep_client, output_name=arvargs.output_name, output_tags=arvargs.output_tags)
except Exception as e:
logger.error(e)
return 1
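Callers that build the runner directly now pass the raw tag string through the constructor. A minimal sketch, assuming a reachable Arvados API server and leaving out the CWL tool and job order that the runner's executor would normally receive; the output name and tag values are example data:

    import arvados
    import arvados.keep
    from arvados.api import OrderedJsonModel
    from arvados_cwl import ArvCwlRunner  # the class modified above

    api_client = arvados.api('v1', model=OrderedJsonModel())
    keep_client = arvados.keep.KeepClient(api_client=api_client, num_retries=4)

    # Tags are passed as one comma-separated string, exactly as --output-tags provides them.
    runner = ArvCwlRunner(api_client,
                          work_api="jobs",
                          keep_client=keep_client,
                          output_name="Output of my-workflow",
                          output_tags="tag0,tag1")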