11462: Fix crunch script for fetcher_constructor constructor change.
[arvados.git] / sdk / cwl / arvados_cwl / crunch_script.py
1 # Crunch script integration for running arvados-cwl-runner (importing
2 # arvados_cwl module) inside a crunch job.
3 #
4 # This gets the job record, transforms the script parameters into a valid CWL
5 # input object, then executes the CWL runner to run the underlying workflow or
6 # tool.  When the workflow completes, record the output object in an output
7 # collection for this runner job.
8
9 import arvados
10 import arvados_cwl
11 import arvados.collection
12 import arvados.util
13 import cwltool.main
14 import logging
15 import os
16 import json
17 import argparse
18 import re
19 import functools
20
21 from arvados.api import OrderedJsonModel
22 from cwltool.process import shortname, adjustFileObjs, adjustDirObjs, normalizeFilesDirs
23 from cwltool.load_tool import load_tool
24 from cwltool.errors import WorkflowException
25
26 from .fsaccess import CollectionFetcher, CollectionFsAccess
27
28 logger = logging.getLogger('arvados.cwl-runner')
29
def run():
    """Crunch job entry point for arvados-cwl-runner.

    Fetches the current job record, transforms its script_parameters into
    a valid CWL job order object (promoting bare Keep locators to File
    objects), loads the tool from the task's Keep mount, and executes it
    with ArvCwlRunner.  On any failure, records whatever output collection
    exists and marks the current job task as failed.
    """
    # Timestamps are added by crunch-job, so don't print redundant timestamps.
    arvados.log_handler.setFormatter(logging.Formatter('%(name)s %(levelname)s: %(message)s'))

    # Print package versions
    logger.info(arvados_cwl.versionstring())

    api = arvados.api("v1")

    arvados_cwl.add_arv_hints()

    runner = None
    try:
        job_order_object = arvados.current_job()['script_parameters']
        # The tool itself is staged under the task's Keep mount; "cwl:tool"
        # is consumed here and must not leak into the workflow inputs.
        toolpath = "file://%s/%s" % (os.environ['TASK_KEEPMOUNT'], job_order_object.pop("cwl:tool"))

        # Matches a bare portable data hash, optionally followed by a path.
        pdh_path = re.compile(r'^[0-9a-f]{32}\+\d+(/.+)?$')

        def keeppath(v):
            # Prefix bare PDH paths with the keep: scheme; pass others through.
            if pdh_path.match(v):
                return "keep:%s" % v
            else:
                return v

        def keeppathObj(v):
            v["location"] = keeppath(v["location"])

        # Promote bare Keep locator strings in the job order to CWL File
        # objects so cwltool treats them as file inputs.
        for k, v in job_order_object.items():
            if isinstance(v, basestring) and arvados.util.keep_locator_pattern.match(v):
                job_order_object[k] = {
                    "class": "File",
                    "location": "keep:%s" % v
                }

        adjustFileObjs(job_order_object, keeppathObj)
        adjustDirObjs(job_order_object, keeppathObj)
        normalizeFilesDirs(job_order_object)

        # Runner options are passed through script_parameters under the
        # "arv:" prefix; pop() them so they aren't mistaken for workflow
        # inputs.  Defaults match the previous if/del stanzas exactly.
        output_name = job_order_object.pop("arv:output_name", None)
        output_tags = job_order_object.pop("arv:output_tags", None)
        enable_reuse = job_order_object.pop("arv:enable_reuse", True)
        on_error = job_order_object.pop("arv:on_error", "continue")

        runner = arvados_cwl.ArvCwlRunner(api_client=arvados.api('v1', model=OrderedJsonModel()),
                                          output_name=output_name, output_tags=output_tags)

        make_fs_access = functools.partial(CollectionFsAccess,
                                           collection_cache=runner.collection_cache)

        t = load_tool(toolpath, runner.arv_make_tool,
                      fetcher_constructor=functools.partial(CollectionFetcher,
                                                            api_client=runner.api,
                                                            fs_access=make_fs_access(""),
                                                            num_retries=runner.num_retries))

        args = argparse.Namespace()
        args.project_uuid = arvados.current_job()["owner_uuid"]
        args.enable_reuse = enable_reuse
        args.on_error = on_error
        args.submit = False
        args.debug = False
        args.quiet = False
        args.ignore_docker_for_reuse = False
        args.basedir = os.getcwd()
        args.name = None
        args.cwl_runner_job = {"uuid": arvados.current_job()["uuid"], "state": arvados.current_job()["state"]}
        args.make_fs_access = make_fs_access

        runner.arv_executor(t, job_order_object, **vars(args))
    except Exception as e:
        # Use the module logger (not the root logger) so messages go
        # through the arvados.cwl-runner handler configured above.
        if isinstance(e, WorkflowException):
            # Workflow errors are expected failures; log without traceback.
            logger.info("Workflow error %s", e)
        else:
            logger.exception("Unhandled exception")
        # Record any partial output that was produced before the failure.
        if runner and runner.final_output_collection:
            outputCollection = runner.final_output_collection.portable_data_hash()
        else:
            outputCollection = None
        api.job_tasks().update(uuid=arvados.current_task()['uuid'],
                               body={
                                   'output': outputCollection,
                                   'success': False,
                                   'progress': 1.0
                               }).execute()