X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/a3d2b8e1de5b8c785846ddc57ae9a4c02bc51adc..4e5838bd9e1a7baa5b3e53e97e308140e4b6105f:/sdk/cwl/arvados_cwl/executor.py

diff --git a/sdk/cwl/arvados_cwl/executor.py b/sdk/cwl/arvados_cwl/executor.py
index 0bb17e99a2..3241fb607c 100644
--- a/sdk/cwl/arvados_cwl/executor.py
+++ b/sdk/cwl/arvados_cwl/executor.py
@@ -31,8 +31,8 @@ from arvados.keep import KeepClient
 from arvados.errors import ApiError
 import arvados_cwl.util
 
-from .arvcontainer import RunnerContainer
-from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps
+from .arvcontainer import RunnerContainer, cleanup_name_for_collection
+from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps, make_builder
 from .arvtool import ArvadosCommandTool, validate_cluster_target, ArvadosExpressionTool
 from .arvworkflow import ArvadosWorkflow, upload_workflow
 from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver, CollectionCache, pdh_size
@@ -565,8 +565,9 @@ The 'jobs' API is no longer supported.
         self.project_uuid = runtimeContext.project_uuid
 
         # Upload local file references in the job order.
-        job_order = upload_job_order(self, "%s input" % runtimeContext.name,
-                                     updated_tool, job_order, runtimeContext)
+        with Perf(metrics, "upload_job_order"):
+            job_order = upload_job_order(self, "%s input" % runtimeContext.name,
+                                         updated_tool, job_order, runtimeContext)
 
         # the last clause means: if it is a command line tool, and we
         # are going to wait for the result, and always_submit_runner
@@ -581,19 +582,23 @@ The 'jobs' API is no longer supported.
 
         loadingContext = self.loadingContext.copy()
         loadingContext.do_validate = False
+        loadingContext.disable_js_validation = True
         if submitting:
             loadingContext.do_update = False
             # Document may have been auto-updated. Reload the original
             # document with updating disabled because we want to
             # submit the document with its original CWL version, not
             # the auto-updated one.
-            tool = load_tool(updated_tool.tool["id"], loadingContext)
+            with Perf(metrics, "load_tool original"):
+                tool = load_tool(updated_tool.tool["id"], loadingContext)
         else:
             tool = updated_tool
 
         # Upload direct dependencies of workflow steps, get back mapping of files to keep references.
         # Also uploads docker images.
-        merged_map = upload_workflow_deps(self, tool, runtimeContext)
+        logger.info("Uploading workflow dependencies")
+        with Perf(metrics, "upload_workflow_deps"):
+            merged_map = upload_workflow_deps(self, tool, runtimeContext)
 
         # Recreate process object (ArvadosWorkflow or
         # ArvadosCommandTool) because tool document may have been
@@ -602,7 +607,8 @@ The 'jobs' API is no longer supported.
         loadingContext.loader = tool.doc_loader
         loadingContext.avsc_names = tool.doc_schema
         loadingContext.metadata = tool.metadata
-        tool = load_tool(tool.tool, loadingContext)
+        with Perf(metrics, "load_tool"):
+            tool = load_tool(tool.tool, loadingContext)
 
         if runtimeContext.update_workflow or runtimeContext.create_workflow:
             # Create a pipeline template or workflow record and exit.
@@ -627,6 +633,11 @@ The 'jobs' API is no longer supported.
         runtimeContext.tmpdir_prefix = "tmp"
         runtimeContext.work_api = self.work_api
 
+        if not self.output_name:
+            self.output_name = "Output from workflow %s" % runtimeContext.name
+
+        self.output_name = cleanup_name_for_collection(self.output_name)
+
         if self.work_api == "containers":
             if self.ignore_docker_for_reuse:
                 raise Exception("--ignore-docker-for-reuse not supported with containers API.")
@@ -779,8 +790,6 @@ The 'jobs' API is no longer supported.
             if workbench2 or workbench1:
                 logger.info("Output at %scollections/%s", workbench2 or workbench1, tool.final_output)
         else:
-            if self.output_name is None:
-                self.output_name = "Output of %s" % (shortname(tool.tool["id"]))
             if self.output_tags is None:
                 self.output_tags = ""
 
@@ -794,8 +803,9 @@ The 'jobs' API is no longer supported.
 
             output_properties = {}
             output_properties_req, _ = tool.get_requirement("http://arvados.org/cwl#OutputCollectionProperties")
             if output_properties_req:
+                builder = make_builder(job_order, tool.hints, tool.requirements, runtimeContext, tool.metadata)
                 for pr in output_properties_req["outputProperties"]:
-                    output_properties[pr["propertyName"]] = self.builder.do_eval(pr["propertyValue"])
+                    output_properties[pr["propertyName"]] = builder.do_eval(pr["propertyValue"])
 
             self.final_output, self.final_output_collection = self.make_output_collection(self.output_name, storage_classes, self.output_tags, output_properties,
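
A note on the pattern this diff introduces: several expensive steps (upload_job_order, the two load_tool calls, and upload_workflow_deps) are now wrapped in "with Perf(metrics, ...):" blocks so their wall-clock time shows up in the metrics log. The code below is a minimal, hypothetical sketch of such a timing context manager, not the actual arvados_cwl implementation; the logger name and reporting format are assumptions made for illustration.

# Hypothetical sketch of a Perf-style timing context manager (not arvados_cwl's own code).
import logging
import time

metrics = logging.getLogger("example.metrics")  # assumed logger name, for illustration only

class Perf:
    """Log how long the wrapped block of code took to run."""

    def __init__(self, logger, name):
        self.logger = logger
        self.name = name

    def __enter__(self):
        self.start = time.time()
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # Report elapsed time; returning None lets any exception propagate.
        self.logger.info("%s took %.3f seconds", self.name, time.time() - self.start)

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    with Perf(metrics, "upload_job_order"):
        time.sleep(0.1)  # stand-in for the real upload work

Wrapping a block this way keeps the timing concern out of the wrapped code itself, which is why the diff can add instrumentation without changing the calls to upload_job_order, load_tool, or upload_workflow_deps.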