for v in obj:
self.check_writable(v)
- def make_output_collection(self, name, outputObj):
+ def make_output_collection(self, name, outputObj, tagsString):
outputObj = copy.deepcopy(outputObj)
files = []
final.api_response()["name"],
final.manifest_locator())
+ final_uuid = final.manifest_locator()
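+ # Apply each comma-separated tag to the finished output collection by
+ # creating an Arvados "tag" link that points at it.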
+ tags = tagsString.split(',') if tagsString else []
+ for tag in tags:
+     self.api.links().create(body={"head_uuid": final_uuid, "link_class": "tag", "name": tag}).execute()
+
self.final_output_collection = final
def set_crunch_output(self):
else:
if self.output_name is None:
self.output_name = "Output of %s" % (shortname(tool.tool["id"]))
- self.make_output_collection(self.output_name, self.final_output)
+ self.make_output_collection(self.output_name, self.final_output, kwargs.get("output_tags", ""))
self.set_crunch_output()
if self.final_status != "success":
parser.add_argument("--project-uuid", type=str, metavar="UUID", help="Project that will own the workflow jobs, if not provided, will go to home project.")
parser.add_argument("--output-name", type=str, help="Name to use for collection that stores the final output.", default=None)
+ parser.add_argument("--output-tags", type=str, help="Tags for the final output collection separated by commas, e.g., '--output-tags tag0,tag1,tag2'.", default=None)
parser.add_argument("--ignore-docker-for-reuse", action="store_true",
help="Ignore Docker image version when deciding whether to reuse past jobs.",
default=False)
if self.output_name:
command.append("--output-name=" + self.output_name)
+ if kwargs.get("output_tags"):
+ command.append("--output-tags=" + kwargs.get("output_tags"))
+
if self.enable_reuse:
command.append("--enable-reuse")
else:
if self.output_name:
self.job_order["arv:output_name"] = self.output_name
+ if kwargs.get("output_tags"):
+ self.job_order["arv:output_tags"] = kwargs.get("output_tags")
+
self.job_order["arv:enable_reuse"] = self.enable_reuse
return {
output_name = job_order_object["arv:output_name"]
del job_order_object["arv:output_name"]
+ if "arv:output_tags" in job_order_object:
+ args.output_tags = job_order_object["arv:output_tags"]
+ del job_order_object["arv:output_tags"]
+
if "arv:enable_reuse" in job_order_object:
enable_reuse = job_order_object["arv:enable_reuse"]
del job_order_object["arv:enable_reuse"]
"location": "keep:99999999999999999999999999999992+99/bar.txt",
"basename": "baz.txt"
}
- })
+ }, "")
final.copy.assert_has_calls([mock.call('bar.txt', 'baz.txt', overwrite=False, source_collection=readermock)])
final.copy.assert_has_calls([mock.call('foo.txt', 'foo.txt', overwrite=False, source_collection=readermock)])
self.assertEqual(capture_stdout.getvalue(),
stubs.expect_pipeline_uuid + '\n')
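+ # New test: a --output-tags submission should record the tags in the
+ # pipeline instance's script_parameters.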
+ @mock.patch("time.sleep")
+ @stubs
+ def test_submit_output_tags(self, stubs, tm):
+ output_tags = "tag0,tag1,tag2"
+
+ capture_stdout = cStringIO.StringIO()
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--debug", "--output-tags", output_tags,
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ capture_stdout, sys.stderr, api_client=stubs.api)
+ self.assertEqual(exited, 0)
+
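+ # The submitted pipeline instance should carry the tags through to
+ # script_parameters verbatim.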
+ stubs.expect_pipeline_instance["components"]["cwl-runner"]["script_parameters"]["arv:output_tags"] = "tag0,tag1,tag2"
+
+ expect_pipeline = copy.deepcopy(stubs.expect_pipeline_instance)
+ expect_pipeline["owner_uuid"] = stubs.fake_user_uuid
+ stubs.api.pipeline_instances().create.assert_called_with(
+ body=expect_pipeline)
+ self.assertEqual(capture_stdout.getvalue(),
+ stubs.expect_pipeline_uuid + '\n')
+
@mock.patch("time.sleep")
@stubs
def test_submit_with_project_uuid(self, stubs, tm):