# Crunch script integration for running arvados-cwl-runner (importing
# arvados_cwl module) inside a crunch job.
#
# This gets the job record, transforms the script parameters into a valid CWL
# input object, then executes the CWL runner to run the underlying workflow or
# tool.  When the workflow completes, the output object is recorded in an
# output collection for this runner job.
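#
# Illustrative example of the script_parameters this script consumes (the
# collection hashes and input names below are made up, not real data):
# "cwl:tool" is a path under the task's Keep mount, bare Keep locators are
# wrapped as CWL File objects, and "arv:*" keys are options for the runner
# itself rather than workflow inputs.
#
#   {
#     "cwl:tool": "deadbeefdeadbeefdeadbeefdeadbeef+1234/workflow.cwl",
#     "reads": "cafebabecafebabecafebabecafebabe+45/reads.fastq",
#     "arv:enable_reuse": true,
#     "arv:output_name": "My workflow output"
#   }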
import argparse
import functools
import logging
import os
import re

import arvados
import arvados.collection
import arvados.util
import arvados_cwl
import arvados_cwl.fsaccess

from arvados.api import OrderedJsonModel
from cwltool.process import shortname, adjustFileObjs, adjustDirObjs, getListing, normalizeFilesDirs
from cwltool.load_tool import load_tool
from cwltool.errors import WorkflowException
logger = logging.getLogger('arvados.cwl-runner')

# Print package versions
logger.info(arvados_cwl.versionstring())

api = arvados.api("v1")

arvados_cwl.add_arv_hints()
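# The job's script_parameters double as the CWL job order.  The block below
# rewrites portable data hashes and bare Keep locators into keep: URIs so the
# CWL runner (via its Keep-backed fsaccess layer) can resolve them, and points
# "cwl:tool" at the workflow definition under the task's Keep mount.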
runner = None
try:
    job_order_object = arvados.current_job()['script_parameters']

    pdh_path = re.compile(r'^[0-9a-f]{32}\+\d+(/.+)?$')

    # Rewrite a bare portable data hash path into a keep: URI.
    def keeppath(v):
        if pdh_path.match(v):
            return "keep:%s" % v
        else:
            return v

    def keeppathObj(v):
        v["location"] = keeppath(v["location"])

    job_order_object["cwl:tool"] = "file://%s/%s" % (os.environ['TASK_KEEPMOUNT'], job_order_object["cwl:tool"])

    for k, v in job_order_object.items():
        if isinstance(v, basestring) and arvados.util.keep_locator_pattern.match(v):
            job_order_object[k] = {
                "class": "File",
                "location": "keep:%s" % v
            }

    adjustFileObjs(job_order_object, keeppathObj)
    adjustDirObjs(job_order_object, keeppathObj)
    normalizeFilesDirs(job_order_object)
    adjustDirObjs(job_order_object, functools.partial(getListing, arvados_cwl.fsaccess.CollectionFsAccess("", api_client=api)))
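    # Reserved "arv:*" keys carry options for the runner itself; pop them out
    # of the job order so they are not passed to the workflow as inputs.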
68 if "arv:output_name" in job_order_object:
69 output_name = job_order_object["arv:output_name"]
70 del job_order_object["arv:output_name"]
72 if "arv:output_tags" in job_order_object:
73 output_tags = job_order_object["arv:output_tags"]
74 del job_order_object["arv:output_tags"]
76 if "arv:enable_reuse" in job_order_object:
77 enable_reuse = job_order_object["arv:enable_reuse"]
78 del job_order_object["arv:enable_reuse"]
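    # Build the runner plus an argparse.Namespace standing in for command line
    # options, then execute the workflow.  OrderedJsonModel keeps API response
    # keys in their original order.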
    runner = arvados_cwl.ArvCwlRunner(api_client=arvados.api('v1', model=OrderedJsonModel()),
                                      output_name=output_name, output_tags=output_tags)

    t = load_tool(job_order_object, runner.arv_make_tool)

    args = argparse.Namespace()
    args.project_uuid = arvados.current_job()["owner_uuid"]
    args.enable_reuse = enable_reuse
    args.ignore_docker_for_reuse = False
    args.basedir = os.getcwd()
    args.cwl_runner_job = {"uuid": arvados.current_job()["uuid"], "state": arvados.current_job()["state"]}

    outputObj = runner.arv_executor(t, job_order_object, **vars(args))
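    # Any failure above falls through to the handler below, which logs the
    # error, captures whatever partial output collection exists, and marks
    # this runner task as unsuccessful.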
except Exception as e:
    if isinstance(e, WorkflowException):
        logging.info("Workflow error %s", e)
    else:
        logging.exception("Unhandled exception")

    if runner and runner.final_output_collection:
        outputCollection = runner.final_output_collection.portable_data_hash()
    else:
        outputCollection = None

    api.job_tasks().update(uuid=arvados.current_task()['uuid'],
                           body={
                               'output': outputCollection,
                               'success': False,
                               'progress': 1.0
                           }).execute()