X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/fcdf73b20710064a80807e3d8b579a11b629d193..873fcf181c037cc1e42419bfeaf5bb70c9d9e239:/sdk/cwl/arvados_cwl/executor.py

diff --git a/sdk/cwl/arvados_cwl/executor.py b/sdk/cwl/arvados_cwl/executor.py
index faee1a29f8..240e014e5a 100644
--- a/sdk/cwl/arvados_cwl/executor.py
+++ b/sdk/cwl/arvados_cwl/executor.py
@@ -2,12 +2,6 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
-from __future__ import division
-from builtins import next
-from builtins import object
-from builtins import str
-from future.utils import viewvalues, viewitems
-
 import argparse
 import logging
 import os
@@ -34,9 +28,9 @@ from arvados.errors import ApiError
 import arvados_cwl.util
 from .arvcontainer import RunnerContainer, cleanup_name_for_collection
-from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps, make_builder
+from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps, make_builder, update_from_merged_map, print_keep_deps
 from .arvtool import ArvadosCommandTool, validate_cluster_target, ArvadosExpressionTool
-from .arvworkflow import ArvadosWorkflow, upload_workflow, new_upload_workflow, make_workflow_record
+from .arvworkflow import ArvadosWorkflow, upload_workflow, make_workflow_record
 from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver, CollectionCache, pdh_size
 from .perf import Perf
 from .pathmapper import NoFollowPathMapper
@@ -70,7 +64,7 @@ class RuntimeStatusLoggingHandler(logging.Handler):
             kind = 'error'
         elif record.levelno >= logging.WARNING:
             kind = 'warning'
-        if kind == 'warning' and record.name == "salad":
+        if kind == 'warning' and record.name in ("salad", "crunchstat_summary"):
             # Don't send validation warnings to runtime status,
             # they're noisy and unhelpful.
             return
@@ -146,6 +140,7 @@ class ArvCwlExecutor(object):
         self.stdout = stdout
         self.fast_submit = False
         self.git_info = arvargs.git_info
+        self.debug = False
 
         if keep_client is not None:
             self.keep_client = keep_client
@@ -266,7 +261,7 @@ The 'jobs' API is no longer supported.
         activity statuses, for example in the RuntimeStatusLoggingHandler.
         """
 
-        if kind not in ('error', 'warning'):
+        if kind not in ('error', 'warning', 'activity'):
             # Ignore any other status kind
             return
 
@@ -281,7 +276,7 @@ The 'jobs' API is no longer supported.
             runtime_status = current.get('runtime_status', {})
 
             original_updatemessage = updatemessage = runtime_status.get(kind, "")
-            if not updatemessage:
+            if kind == "activity" or not updatemessage:
                 updatemessage = message
 
             # Subsequent messages tacked on in detail
@@ -368,9 +363,11 @@ The 'jobs' API is no longer supported.
         while keys:
             page = keys[:pageSize]
             try:
-                proc_states = table.list(filters=[["uuid", "in", page]]).execute(num_retries=self.num_retries)
+                proc_states = table.list(filters=[["uuid", "in", page]], select=["uuid", "container_uuid", "state", "log_uuid",
+                                                                                 "output_uuid", "modified_at", "properties",
+                                                                                 "runtime_constraints"]).execute(num_retries=self.num_retries)
             except Exception as e:
-                logger.exception("Error checking states on API server: %s", e)
+                logger.warning("Temporary error checking states on API server: %s", e)
                 remain_wait = self.poll_interval
                 continue
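The last hunk above changes the poll loop in two ways: the process-state query now passes a `select` list so each page of results carries only the fields the loop reads, and a failed poll is logged as a transient warning rather than an exception. A minimal sketch of the same paged-polling pattern, using the standard Arvados Python SDK; `uuids_to_poll`, `handle_state_change`, and the page size are hypothetical stand-ins, not names from this file:

    import time
    import arvados

    api = arvados.api("v1")
    page_size = 1000              # stand-in; the executor uses its own pageSize
    poll_interval = 12            # stand-in
    keys = list(uuids_to_poll)    # hypothetical: uuids of processes to watch

    while keys:
        page = keys[:page_size]
        try:
            proc_states = api.container_requests().list(
                filters=[["uuid", "in", page]],
                # Request only the fields the loop inspects; on large
                # workflows this keeps each response small.
                select=["uuid", "container_uuid", "state", "modified_at"],
            ).execute()
        except Exception as exc:
            # Treat the failure as transient: warn, wait, retry the page.
            print("Temporary error checking states: %s" % exc)
            time.sleep(poll_interval)
            continue
        keys = keys[page_size:]
        for rec in proc_states["items"]:
            handle_state_change(rec)  # hypothetical callback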
@@ -418,7 +415,7 @@ The 'jobs' API is no longer supported.
             if obj.get("class") == "InplaceUpdateRequirement":
                 if obj["inplaceUpdate"] and parentfield == "requirements":
                     raise SourceLine(obj, "class", UnsupportedRequirement).makeError("InplaceUpdateRequirement not supported for keep collections.")
-            for k,v in viewitems(obj):
+            for k,v in obj.items():
                 self.check_features(v, parentfield=k)
         elif isinstance(obj, list):
             for i,v in enumerate(obj):
@@ -593,6 +590,8 @@ The 'jobs' API is no longer supported.
     def arv_executor(self, updated_tool, job_order, runtimeContext, logger=None):
         self.debug = runtimeContext.debug
 
+        self.runtime_status_update("activity", "initialization")
+
         git_info = self.get_git_info(updated_tool) if self.git_info else {}
         if git_info:
             logger.info("Git provenance")
@@ -600,6 +599,8 @@ The 'jobs' API is no longer supported.
             if git_info[g]:
                 logger.info("  %s: %s", g.split("#", 1)[1], git_info[g])
 
+        runtimeContext.git_info = git_info
+
         workbench1 = self.api.config()["Services"]["Workbench1"]["ExternalURL"]
         workbench2 = self.api.config()["Services"]["Workbench2"]["ExternalURL"]
         controller = self.api.config()["Services"]["Controller"]["ExternalURL"]
@@ -646,6 +647,10 @@ The 'jobs' API is no longer supported.
             runtimeContext.copy_deps = True
             runtimeContext.match_local_docker = True
 
+        if runtimeContext.print_keep_deps:
+            runtimeContext.copy_deps = False
+            runtimeContext.match_local_docker = False
+
         if runtimeContext.update_workflow and self.project_uuid is None:
             # If we are updating a workflow, make sure anything that
             # gets uploaded goes into the same parent project, unless
@@ -655,34 +660,27 @@ The 'jobs' API is no longer supported.
 
         self.project_uuid = runtimeContext.project_uuid
 
+        self.runtime_status_update("activity", "data transfer")
+
         # Upload local file references in the job order.
         with Perf(metrics, "upload_job_order"):
             job_order, jobmapper = upload_job_order(self, "%s input" % runtimeContext.name,
                                                     updated_tool, job_order, runtimeContext)
 
+        # determine if we are submitting or directly executing the workflow.
+        #
         # the last clause means: if it is a command line tool, and we
         # are going to wait for the result, and always_submit_runner
         # is false, then we don't submit a runner process.
-        submitting = (runtimeContext.update_workflow or
-                      runtimeContext.create_workflow or
-                      (runtimeContext.submit and not
+        submitting = (runtimeContext.submit and not
                        (updated_tool.tool["class"] == "CommandLineTool" and
                         runtimeContext.wait and
-                        not runtimeContext.always_submit_runner)))
+                        not runtimeContext.always_submit_runner))
 
         loadingContext = self.loadingContext.copy()
         loadingContext.do_validate = False
         loadingContext.disable_js_validation = True
 
-        # if submitting and not self.fast_submit:
-        #     loadingContext.do_update = False
-        #     # Document may have been auto-updated. Reload the original
-        #     # document with updating disabled because we want to
-        #     # submit the document with its original CWL version, not
-        #     # the auto-updated one.
-        #     with Perf(metrics, "load_tool original"):
-        #         tool = load_tool(updated_tool.tool["id"], loadingContext)
-        # else:
         tool = updated_tool
 
         # Upload direct dependencies of workflow steps, get back mapping of files to keep references.
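The rewritten `submitting` expression in the hunk above no longer folds in `update_workflow` or `create_workflow`; those registration modes get their own branch further down. Restated as a standalone predicate, a sketch assuming an object carrying the same attribute names as the runtimeContext used here:

    def will_submit_runner(ctx, tool_class):
        # Submit a runner process except in the one direct-execution case:
        # a plain CommandLineTool that we will wait on, without
        # --always-submit-runner.
        direct_case = (tool_class == "CommandLineTool"
                       and ctx.wait
                       and not ctx.always_submit_runner)
        return bool(ctx.submit) and not direct_case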
@@ -692,58 +690,64 @@ The 'jobs' API is no longer supported.
             with Perf(metrics, "upload_workflow_deps"):
                 merged_map = upload_workflow_deps(self, tool, runtimeContext)
         else:
+            # in the fast submit case, we are running a workflow that
+            # has already been uploaded to Arvados, so we assume all
+            # the dependencies have been pinned to keep references and
+            # there is nothing to do.
             merged_map = {}
 
-        # Recreate process object (ArvadosWorkflow or
-        # ArvadosCommandTool) because tool document may have been
-        # updated by upload_workflow_deps in ways that modify
-        # hints or requirements.
         loadingContext.loader = tool.doc_loader
         loadingContext.avsc_names = tool.doc_schema
         loadingContext.metadata = tool.metadata
         loadingContext.skip_resolve_all = True
-        #with Perf(metrics, "load_tool"):
-        #    tool = load_tool(tool.tool, loadingContext)
 
         workflow_wrapper = None
-        if submitting and not self.fast_submit:
+        if (submitting and not self.fast_submit) or runtimeContext.update_workflow or runtimeContext.create_workflow or runtimeContext.print_keep_deps:
             # upload workflow and get back the workflow wrapper
-            workflow_wrapper = new_upload_workflow(self, tool, job_order,
-                                                   runtimeContext.project_uuid,
-                                                   runtimeContext,
-                                                   uuid=runtimeContext.update_workflow,
-                                                   submit_runner_ram=runtimeContext.submit_runner_ram,
-                                                   name=runtimeContext.name,
-                                                   merged_map=merged_map,
-                                                   submit_runner_image=runtimeContext.submit_runner_image,
-                                                   git_info=git_info,
-                                                   set_defaults=(runtimeContext.update_workflow or runtimeContext.create_workflow),
-                                                   jobmapper=jobmapper)
+            workflow_wrapper = upload_workflow(self, tool, job_order,
+                                               runtimeContext.project_uuid,
+                                               runtimeContext,
+                                               uuid=runtimeContext.update_workflow,
+                                               submit_runner_ram=runtimeContext.submit_runner_ram,
+                                               name=runtimeContext.name,
+                                               merged_map=merged_map,
+                                               submit_runner_image=runtimeContext.submit_runner_image,
+                                               git_info=git_info,
+                                               set_defaults=(runtimeContext.update_workflow or runtimeContext.create_workflow),
+                                               jobmapper=jobmapper)
 
             if runtimeContext.update_workflow or runtimeContext.create_workflow:
-                # Now create a workflow record and exit.
+                # We're registering the workflow, so create or update
+                # the workflow record and then exit.
                 uuid = make_workflow_record(self, workflow_wrapper, runtimeContext.name, tool,
                                             runtimeContext.project_uuid, runtimeContext.update_workflow)
                 self.stdout.write(uuid + "\n")
                 return (None, "success")
 
+            if runtimeContext.print_keep_deps:
+                # Just find and print out all the collection dependencies and exit
+                print_keep_deps(self, runtimeContext, merged_map, tool)
+                return (None, "success")
+
+            # Did not register a workflow, so we're going to submit
+            # it instead.
             loadingContext.loader.idx.clear()
             loadingContext.loader.idx["_:main"] = workflow_wrapper
             workflow_wrapper["id"] = "_:main"
 
-            # Reload just the wrapper workflow.
+            # Reload the minimal wrapper workflow.
             self.fast_submit = True
-            #print("bah bah", loadingContext.requirements)
-            #workflow_wrapper, _ = loadingContext.loader.resolve_all(cmap(workflow_wrapper), "_:main", checklinks=True)
-
-            #tool = load_tool(workflow_wrapper[0], loadingContext)
-            #print("AAA", json.dumps(loadingContext.loader.idx["_:main"], indent=2))
             tool = load_tool(workflow_wrapper, loadingContext)
-            loadingContext.loader.idx["_:main"] = workflow_wrapper
-            #print("BBB", json.dumps(loadingContext.loader.idx["_:main"], indent=2))
 
+        if not submitting:
+            # If we are going to run the workflow now (rather than
+            # submit it), we need to update the workflow document
+            # replacing file references with keep references.  If we
+            # are just going to construct a run submission, we don't
+            # need to do this.
+            update_from_merged_map(tool, merged_map)
 
         self.apply_reqs(job_order, tool)
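When the workflow will run in-process rather than be submitted, the new `if not submitting:` branch above calls `update_from_merged_map` (imported from `.runner` in the first hunk) to rewrite file references in the tool document to keep references. Roughly the substitution it implies, sketched under the assumption that the mapping boils down to a flat {local location: keep reference} dict; the real merged_map structure may differ:

    def apply_merged_map(doc, resolved):
        # Walk the CWL document and swap any local file location for its
        # pinned keep reference.
        if isinstance(doc, dict):
            loc = doc.get("location")
            if isinstance(loc, str) and loc in resolved:
                doc["location"] = resolved[loc]
            for value in doc.values():
                apply_merged_map(value, resolved)
        elif isinstance(doc, list):
            for value in doc:
                apply_merged_map(value, resolved)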
@@ -787,7 +791,10 @@ The 'jobs' API is no longer supported.
 
         runnerjob = None
         if runtimeContext.submit:
-            # Submit a runner job to run the workflow for us.
+            # We are submitting instead of running immediately.
+            #
+            # Create a "Runner job" that, when run() is invoked,
+            # creates the container request to run the workflow.
             if self.work_api == "containers":
                 if submitting:
                     loadingContext.metadata = updated_tool.metadata.copy()
@@ -816,11 +823,18 @@ The 'jobs' API is no longer supported.
                                                    runtimeContext)
 
         if runtimeContext.submit and not runtimeContext.wait:
+            # User provided --no-wait so submit the container request,
+            # get the container request uuid, print it out, and exit.
             runnerjob = next(jobiter)
             runnerjob.run(runtimeContext)
             self.stdout.write(runnerjob.uuid+"\n")
             return (None, "success")
 
+        # We are either running the workflow directly, or submitting it
+        # and will wait for a final result.
+
+        self.runtime_status_update("activity", "workflow execution")
+
         current_container = arvados_cwl.util.get_current_container(self.api, self.num_retries, logger)
         if current_container:
             logger.info("Running inside container %s", current_container.get("uuid"))
@@ -858,7 +872,8 @@ The 'jobs' API is no longer supported.
                 if (self.task_queue.in_flight + len(self.processes)) > 0:
                     self.workflow_eval_lock.wait(3)
                 else:
-                    logger.error("Workflow is deadlocked, no runnable processes and not waiting on any pending processes.")
+                    if self.final_status is None:
+                        logger.error("Workflow is deadlocked, no runnable processes and not waiting on any pending processes.")
                     break
 
             if self.stop_polling.is_set():
@@ -908,6 +923,11 @@ The 'jobs' API is no longer supported.
         if self.final_output is None:
             raise WorkflowException("Workflow did not return a result.")
 
+        if runtimeContext.usage_report_notes:
+            logger.info("Steps with low resource utilization (possible optimization opportunities):")
+            for x in runtimeContext.usage_report_notes:
+                logger.info("  %s", x)
+
         if runtimeContext.submit and isinstance(tool, Runner):
             logger.info("Final output collection %s", tool.final_output)
             if workbench2 or workbench1:
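Taken together with the earlier runtime_status hunks, the 'activity' kind added by this diff behaves differently from 'warning' and 'error': an activity message always replaces the previous one, while warnings and errors keep the first message and tack later ones onto a detail field (per the "Subsequent messages tacked on in detail" comment). A condensed sketch of that merge rule; the detail key name here is an assumption for illustration:

    def merge_runtime_status(runtime_status, kind, message):
        if kind not in ("error", "warning", "activity"):
            return runtime_status        # other kinds are ignored, as above
        current = runtime_status.get(kind, "")
        if kind == "activity" or not current:
            # Activity is a rolling status; the first warning/error sticks.
            runtime_status[kind] = message
        elif message not in current:
            detail_key = kind + "Detail"  # assumed naming
            detail = runtime_status.get(detail_key, "")
            runtime_status[detail_key] = (detail + "\n" + message) if detail else message
        return runtime_status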