from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, normalizeFilesDirs
from cwltool.load_tool import load_tool
from cwltool.errors import WorkflowException
from cwltool.context import RuntimeContext
from .fsaccess import CollectionFetcher, CollectionFsAccess
debug = job_order_object["arv:debug"]
del job_order_object["arv:debug"]
runner = arvados_cwl.ArvCwlRunner(api_client=arvados.safeapi.ThreadSafeApiCache(
    api_params={"model": OrderedJsonModel()}, keep_params={"num_retries": 4}),
output_name=output_name, output_tags=output_tags)
make_fs_access = functools.partial(CollectionFsAccess,
logging.getLogger('arvados').setLevel(logging.DEBUG)
logging.getLogger("cwltool").setLevel(logging.DEBUG)
args = RuntimeContext()
args.project_uuid = arvados.current_job()["owner_uuid"]
args.enable_reuse = enable_reuse
args.on_error = on_error
args.make_fs_access = make_fs_access
args.trash_intermediate = False
args.intermediate_output_ttl = 0
args.priority = arvados_cwl.DEFAULT_PRIORITY
args.do_validate = True
args.disable_js_validation = False
args.tmp_outdir_prefix = "tmp"
runner.arv_executor(t, job_order_object, args)
except Exception as e:
if isinstance(e, WorkflowException):
logging.info("Workflow error %s", e)