19744: Report steps with low utilization at end of workflow
diff --git a/sdk/cwl/arvados_cwl/executor.py b/sdk/cwl/arvados_cwl/executor.py
index c42d7bf32542439668576b8e6217e2dec45d7718..432b380aabcd90c4c91ff3d7d72a9af29ab52823 100644
@@ -34,9 +34,9 @@ from arvados.errors import ApiError
 
 import arvados_cwl.util
 from .arvcontainer import RunnerContainer, cleanup_name_for_collection
-from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps, make_builder, update_from_merged_map
+from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps, make_builder, update_from_merged_map, print_keep_deps
 from .arvtool import ArvadosCommandTool, validate_cluster_target, ArvadosExpressionTool
-from .arvworkflow import ArvadosWorkflow, upload_workflow, new_upload_workflow, make_workflow_record
+from .arvworkflow import ArvadosWorkflow, upload_workflow, make_workflow_record
 from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver, CollectionCache, pdh_size
 from .perf import Perf
 from .pathmapper import NoFollowPathMapper
@@ -70,7 +70,7 @@ class RuntimeStatusLoggingHandler(logging.Handler):
             kind = 'error'
         elif record.levelno >= logging.WARNING:
             kind = 'warning'
-        if kind == 'warning' and record.name == "salad":
+        if kind == 'warning' and record.name in ("salad", "crunchstat_summary"):
             # Don't send validation warnings to runtime status,
             # they're noisy and unhelpful.
             return
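
The rule above extends an existing suppression: warnings from the
"crunchstat_summary" logger are now filtered out of runtime status the
same way schema-salad validation warnings already were. A minimal
sketch of the pattern (hypothetical helper name):

    import logging

    # Loggers whose warnings are too noisy to surface as runtime status.
    NOISY_WARNING_LOGGERS = ("salad", "crunchstat_summary")

    def status_kind(record):
        # Map a log record to a runtime status kind, or None to drop it.
        if record.levelno >= logging.ERROR:
            return 'error'
        if record.levelno >= logging.WARNING:
            if record.name in NOISY_WARNING_LOGGERS:
                return None  # suppress noisy validation/usage warnings
            return 'warning'
        return None
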
@@ -146,6 +146,7 @@ class ArvCwlExecutor(object):
         self.stdout = stdout
         self.fast_submit = False
         self.git_info = arvargs.git_info
+        self.debug = False
 
         if keep_client is not None:
             self.keep_client = keep_client
@@ -266,7 +267,7 @@ The 'jobs' API is no longer supported.
         activity statuses, for example in the RuntimeStatusLoggingHandler.
         """
 
-        if kind not in ('error', 'warning'):
+        if kind not in ('error', 'warning', 'activity'):
             # Ignore any other status kind
             return
 
@@ -281,7 +282,7 @@ The 'jobs' API is no longer supported.
             runtime_status = current.get('runtime_status', {})
 
             original_updatemessage = updatemessage = runtime_status.get(kind, "")
-            if not updatemessage:
+            if kind == "activity" or not updatemessage:
                 updatemessage = message
 
             # Subsequent messages tacked on in detail
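
The new "activity" kind gets different update semantics from "error"
and "warning": an activity message always replaces the current one (it
reports the latest phase), while errors and warnings keep the first
message seen and tack later ones onto a detail field. A sketch of the
merge rule, assuming runtime_status is the dict stored on the container
record (hypothetical helper name):

    def merge_status(runtime_status, kind, message):
        # 'activity' always reflects the most recent phase; 'error' and
        # 'warning' preserve the first message seen.
        updatemessage = runtime_status.get(kind, "")
        if kind == "activity" or not updatemessage:
            runtime_status[kind] = message
        # (accumulation of later errors/warnings into the detail field
        # is elided here)
        return runtime_status
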
@@ -368,9 +369,11 @@ The 'jobs' API is no longer supported.
                 while keys:
                     page = keys[:pageSize]
                     try:
-                        proc_states = table.list(filters=[["uuid", "in", page]]).execute(num_retries=self.num_retries)
+                        proc_states = table.list(filters=[["uuid", "in", page]], select=["uuid", "container_uuid", "state", "log_uuid",
+                                                                                         "output_uuid", "modified_at", "properties",
+                                                                                         "runtime_constraints"]).execute(num_retries=self.num_retries)
                     except Exception as e:
-                        logger.exception("Error checking states on API server: %s", e)
+                        logger.warning("Temporary error checking states on API server: %s", e)
                         remain_wait = self.poll_interval
                         continue
 
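
Passing select= to the list call asks the API server to return only the
named fields instead of full records, which keeps each page of the
state poll small. A sketch of the polling pattern, assuming table is an
API list resource (e.g. container_requests), keys is the list of UUIDs
to poll, and the page size and poll interval are hypothetical:

    import time

    pageSize = 1000
    poll_interval = 15  # seconds

    while keys:
        page = keys[:pageSize]
        try:
            proc_states = table.list(
                filters=[["uuid", "in", page]],
                select=["uuid", "container_uuid", "state", "log_uuid",
                        "output_uuid", "modified_at", "properties",
                        "runtime_constraints"],
            ).execute(num_retries=num_retries)
        except Exception as e:
            # Treat API errors as transient: warn, wait, retry the page.
            logger.warning("Temporary error checking states on API server: %s", e)
            time.sleep(poll_interval)
            continue
        keys = keys[pageSize:]
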
@@ -593,6 +596,8 @@ The 'jobs' API is no longer supported.
     def arv_executor(self, updated_tool, job_order, runtimeContext, logger=None):
         self.debug = runtimeContext.debug
 
+        self.runtime_status_update("activity", "initialization")
+
         git_info = self.get_git_info(updated_tool) if self.git_info else {}
         if git_info:
             logger.info("Git provenance")
@@ -600,6 +605,8 @@ The 'jobs' API is no longer supported.
                 if git_info[g]:
                     logger.info("  %s: %s", g.split("#", 1)[1], git_info[g])
 
+        runtimeContext.git_info = git_info
+
         workbench1 = self.api.config()["Services"]["Workbench1"]["ExternalURL"]
         workbench2 = self.api.config()["Services"]["Workbench2"]["ExternalURL"]
         controller = self.api.config()["Services"]["Controller"]["ExternalURL"]
@@ -646,6 +653,10 @@ The 'jobs' API is no longer supported.
             runtimeContext.copy_deps = True
             runtimeContext.match_local_docker = True
 
+        if runtimeContext.print_keep_deps:
+            runtimeContext.copy_deps = False
+            runtimeContext.match_local_docker = False
+
         if runtimeContext.update_workflow and self.project_uuid is None:
             # If we are updating a workflow, make sure anything that
             # gets uploaded goes into the same parent project, unless
@@ -655,34 +666,27 @@ The 'jobs' API is no longer supported.
 
         self.project_uuid = runtimeContext.project_uuid
 
+        self.runtime_status_update("activity", "data transfer")
+
         # Upload local file references in the job order.
         with Perf(metrics, "upload_job_order"):
             job_order, jobmapper = upload_job_order(self, "%s input" % runtimeContext.name,
                                          updated_tool, job_order, runtimeContext)
 
+        # Determine whether we are submitting or directly executing the workflow.
+        #
         # the last clause means: if it is a command line tool, and we
         # are going to wait for the result, and always_submit_runner
         # is false, then we don't submit a runner process.
 
-        submitting = (runtimeContext.update_workflow or
-                      runtimeContext.create_workflow or
-                      (runtimeContext.submit and not
+        submitting = (runtimeContext.submit and not
                        (updated_tool.tool["class"] == "CommandLineTool" and
                         runtimeContext.wait and
-                        not runtimeContext.always_submit_runner)))
+                        not runtimeContext.always_submit_runner))
 
         loadingContext = self.loadingContext.copy()
         loadingContext.do_validate = False
         loadingContext.disable_js_validation = True
-        # if submitting and not self.fast_submit:
-        #     loadingContext.do_update = False
-        #     # Document may have been auto-updated. Reload the original
-        #     # document with updating disabled because we want to
-        #     # submit the document with its original CWL version, not
-        #     # the auto-updated one.
-        #     with Perf(metrics, "load_tool original"):
-        #         tool = load_tool(updated_tool.tool["id"], loadingContext)
-        # else:
         tool = updated_tool
 
         # Upload direct dependencies of workflow steps, get back mapping of files to keep references.
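
With workflow registration handled separately below, the submitting
decision above now depends only on --submit plus the CommandLineTool
special case. A worked illustration of the rule (hypothetical helper,
for clarity only):

    def is_submitting(submit, tool_class, wait, always_submit_runner):
        # Mirrors the boolean expression above.
        return submit and not (tool_class == "CommandLineTool"
                               and wait
                               and not always_submit_runner)

    # A bare CommandLineTool that we wait on runs directly...
    assert is_submitting(True, "CommandLineTool", True, False) is False
    # ...but a workflow, a --no-wait run, or --always-submit-runner
    # all get a runner process.
    assert is_submitting(True, "Workflow", True, False) is True
    assert is_submitting(True, "CommandLineTool", False, False) is True
    assert is_submitting(True, "CommandLineTool", True, True) is True
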
@@ -692,52 +696,63 @@ The 'jobs' API is no longer supported.
             with Perf(metrics, "upload_workflow_deps"):
                 merged_map = upload_workflow_deps(self, tool, runtimeContext)
         else:
+            # In the fast submit case, we are running a workflow that
+            # has already been uploaded to Arvados, so we assume all
+            # the dependencies have been pinned to keep references and
+            # there is nothing to do.
             merged_map = {}
 
-        # Recreate process object (ArvadosWorkflow or
-        # ArvadosCommandTool) because tool document may have been
-        # updated by upload_workflow_deps in ways that modify
-        # hints or requirements.
         loadingContext.loader = tool.doc_loader
         loadingContext.avsc_names = tool.doc_schema
         loadingContext.metadata = tool.metadata
         loadingContext.skip_resolve_all = True
-        #with Perf(metrics, "load_tool"):
-        #    tool = load_tool(tool.tool, loadingContext)
 
         workflow_wrapper = None
-        if submitting and not self.fast_submit:
+        if (submitting and not self.fast_submit) or runtimeContext.update_workflow or runtimeContext.create_workflow or runtimeContext.print_keep_deps:
             # upload workflow and get back the workflow wrapper
 
-            workflow_wrapper = new_upload_workflow(self, tool, job_order,
-                                                   runtimeContext.project_uuid,
-                                                   runtimeContext,
-                                                   uuid=runtimeContext.update_workflow,
-                                                   submit_runner_ram=runtimeContext.submit_runner_ram,
-                                                   name=runtimeContext.name,
-                                                   merged_map=merged_map,
-                                                   submit_runner_image=runtimeContext.submit_runner_image,
-                                                   git_info=git_info,
-                                                   set_defaults=(runtimeContext.update_workflow or runtimeContext.create_workflow),
-                                                   jobmapper=jobmapper)
+            workflow_wrapper = upload_workflow(self, tool, job_order,
+                                               runtimeContext.project_uuid,
+                                               runtimeContext,
+                                               uuid=runtimeContext.update_workflow,
+                                               submit_runner_ram=runtimeContext.submit_runner_ram,
+                                               name=runtimeContext.name,
+                                               merged_map=merged_map,
+                                               submit_runner_image=runtimeContext.submit_runner_image,
+                                               git_info=git_info,
+                                               set_defaults=(runtimeContext.update_workflow or runtimeContext.create_workflow),
+                                               jobmapper=jobmapper)
 
             if runtimeContext.update_workflow or runtimeContext.create_workflow:
-                # Now create a workflow record and exit.
+                # We're registering the workflow, so create or update
+                # the workflow record and then exit.
                 uuid = make_workflow_record(self, workflow_wrapper, runtimeContext.name, tool,
                                             runtimeContext.project_uuid, runtimeContext.update_workflow)
                 self.stdout.write(uuid + "\n")
                 return (None, "success")
 
+            if runtimeContext.print_keep_deps:
+                # Just find and print out all the collection dependencies and exit
+                print_keep_deps(self, runtimeContext, merged_map, tool)
+                return (None, "success")
+
+            # We did not register a workflow, so we're going to
+            # submit it instead.
             loadingContext.loader.idx.clear()
             loadingContext.loader.idx["_:main"] = workflow_wrapper
             workflow_wrapper["id"] = "_:main"
 
-            # Reload just the wrapper workflow.
+            # Reload the minimal wrapper workflow.
             self.fast_submit = True
             tool = load_tool(workflow_wrapper, loadingContext)
             loadingContext.loader.idx["_:main"] = workflow_wrapper
 
         if not submitting:
+            # If we are going to run the workflow now (rather than
+            # submit it), we need to update the workflow document,
+            # replacing file references with keep references.  If we
+            # are just going to construct a run submission, we don't
+            # need to do this.
             update_from_merged_map(tool, merged_map)
 
         self.apply_reqs(job_order, tool)
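
When the workflow will run in this process rather than being submitted,
its file references have to be rewritten to the keep references
recorded during upload. The real structure of merged_map is not shown
in this hunk; a rough sketch of the rewriting idea, assuming a flat
location-to-keep-reference mapping:

    def rewrite_locations(doc, location_map):
        # Walk the CWL document, swapping local file locations for the
        # keep references recorded at upload time (structure assumed).
        if isinstance(doc, dict):
            loc = doc.get("location")
            if isinstance(loc, str) and loc in location_map:
                doc["location"] = location_map[loc]  # e.g. "keep:<pdh>/file"
            for value in doc.values():
                rewrite_locations(value, location_map)
        elif isinstance(doc, list):
            for item in doc:
                rewrite_locations(item, location_map)
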
@@ -782,7 +797,10 @@ The 'jobs' API is no longer supported.
 
         runnerjob = None
         if runtimeContext.submit:
-            # Submit a runner job to run the workflow for us.
+            # We are submitting instead of running immediately.
+            #
+            # Create a "Runner job" that, when run() is invoked,
+            # creates the container request to run the workflow.
             if self.work_api == "containers":
                 if submitting:
                     loadingContext.metadata = updated_tool.metadata.copy()
@@ -811,11 +829,18 @@ The 'jobs' API is no longer supported.
                            runtimeContext)
 
         if runtimeContext.submit and not runtimeContext.wait:
+            # The user provided --no-wait, so submit the container request,
+            # get the container request uuid, print it out, and exit.
             runnerjob = next(jobiter)
             runnerjob.run(runtimeContext)
             self.stdout.write(runnerjob.uuid+"\n")
             return (None, "success")
 
+        # We are either running the workflow directly, or submitting
+        # it and waiting for the final result.
+
+        self.runtime_status_update("activity", "workflow execution")
+
         current_container = arvados_cwl.util.get_current_container(self.api, self.num_retries, logger)
         if current_container:
             logger.info("Running inside container %s", current_container.get("uuid"))
@@ -853,7 +878,8 @@ The 'jobs' API is no longer supported.
                     if (self.task_queue.in_flight + len(self.processes)) > 0:
                         self.workflow_eval_lock.wait(3)
                     else:
-                        logger.error("Workflow is deadlocked, no runnable processes and not waiting on any pending processes.")
+                        if self.final_status is None:
+                            logger.error("Workflow is deadlocked, no runnable processes and not waiting on any pending processes.")
                         break
 
                 if self.stop_polling.is_set():
@@ -903,6 +929,11 @@ The 'jobs' API is no longer supported.
         if self.final_output is None:
             raise WorkflowException("Workflow did not return a result.")
 
+        if runtimeContext.usage_report_notes:
+            logger.info("Steps with low resource utilization (possible optimization opportunities):")
+            for x in runtimeContext.usage_report_notes:
+                logger.info("  %s", x)
+
         if runtimeContext.submit and isinstance(tool, Runner):
             logger.info("Final output collection %s", tool.final_output)
             if workbench2 or workbench1:
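
This final hunk is the feature named in the commit title: notes
accumulated in runtimeContext.usage_report_notes while the workflow
runs are printed once, after completion. A sketch of both sides, with
hypothetical note text (the real notes presumably come from
crunchstat_summary analysis of finished steps):

    # Producer side: any component with the runtime context can
    # contribute a human-readable note during the run.
    runtimeContext.usage_report_notes.append(
        "Step 'align' requested 16 GiB of RAM but peak usage was 2 GiB")

    # Consumer side: the executor reports all notes at the end.
    if runtimeContext.usage_report_notes:
        logger.info("Steps with low resource utilization (possible optimization opportunities):")
        for note in runtimeContext.usage_report_notes:
            logger.info("  %s", note)
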