19744: Need to have runtime_constraints on hand
diff --git a/sdk/cwl/arvados_cwl/executor.py b/sdk/cwl/arvados_cwl/executor.py
index ef84dd4983c870d2779a3cf993bcaeff8aa13f6f..729baffe1d2ef50ba7950ee27f6bb5973ceed563 100644
--- a/sdk/cwl/arvados_cwl/executor.py
+++ b/sdk/cwl/arvados_cwl/executor.py
@@ -34,7 +34,7 @@ from arvados.errors import ApiError
 
 import arvados_cwl.util
 from .arvcontainer import RunnerContainer, cleanup_name_for_collection
-from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps, make_builder, update_from_merged_map
+from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps, make_builder, update_from_merged_map, print_keep_deps
 from .arvtool import ArvadosCommandTool, validate_cluster_target, ArvadosExpressionTool
 from .arvworkflow import ArvadosWorkflow, upload_workflow, make_workflow_record
 from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver, CollectionCache, pdh_size
@@ -70,7 +70,7 @@ class RuntimeStatusLoggingHandler(logging.Handler):
             kind = 'error'
         elif record.levelno >= logging.WARNING:
             kind = 'warning'
-        if kind == 'warning' and record.name == "salad":
+        if kind == 'warning' and (record.name == "salad" or record.name == "crunchstat_summary"):
             # Don't send validation warnings to runtime status,
             # they're noisy and unhelpful.
             return
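
The hunk above keeps crunchstat_summary warnings, like schema-salad validation warnings, out of runtime_status. A minimal standalone sketch of that emit() filter, assuming a handler constructed with a runtime_status_update callback (the constructor shape here is illustrative, not necessarily the executor's):

    import logging

    class RuntimeStatusFilterSketch(logging.Handler):
        # Hedged sketch of the level/name filtering shown above; the real
        # handler's constructor and message formatting may differ.
        def __init__(self, runtime_status_update):
            super().__init__()
            self.runtime_status_update = runtime_status_update

        def emit(self, record):
            kind = None
            if record.levelno >= logging.ERROR:
                kind = 'error'
            elif record.levelno >= logging.WARNING:
                kind = 'warning'
            # Validation (salad) and crunchstat_summary warnings are noisy,
            # so they are not forwarded to runtime_status.
            if kind == 'warning' and record.name in ("salad", "crunchstat_summary"):
                return
            if kind is not None:
                self.runtime_status_update(kind, record.getMessage())
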
@@ -146,6 +146,7 @@ class ArvCwlExecutor(object):
         self.stdout = stdout
         self.fast_submit = False
         self.git_info = arvargs.git_info
+        self.debug = False
 
         if keep_client is not None:
             self.keep_client = keep_client
@@ -266,7 +267,7 @@ The 'jobs' API is no longer supported.
         activity statuses, for example in the RuntimeStatusLoggingHandler.
         """
 
-        if kind not in ('error', 'warning'):
+        if kind not in ('error', 'warning', 'activity'):
             # Ignore any other status kind
             return
 
@@ -281,7 +282,7 @@ The 'jobs' API is no longer supported.
             runtime_status = current.get('runtime_status', {})
 
             original_updatemessage = updatemessage = runtime_status.get(kind, "")
-            if not updatemessage:
+            if kind == "activity" or not updatemessage:
                 updatemessage = message
 
             # Subsequent messages tacked on in detail
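
Together with the previous hunk, the new "activity" kind always replaces the stored activity message, while the first "error" or "warning" message is kept and later ones only extend the detail text. A small helper (hypothetical name, not the executor's API) capturing just that replace-versus-keep rule:

    def merge_runtime_status(runtime_status, kind, message):
        # Hypothetical sketch: 'activity' always overwrites, while 'error'
        # and 'warning' keep the first message that was recorded.
        if kind not in ("error", "warning", "activity"):
            return runtime_status
        if kind == "activity" or not runtime_status.get(kind, ""):
            runtime_status[kind] = message
        return runtime_status

    status = {}
    merge_runtime_status(status, "activity", "data transfer")
    merge_runtime_status(status, "activity", "workflow execution")
    assert status["activity"] == "workflow execution"
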
@@ -368,9 +369,11 @@ The 'jobs' API is no longer supported.
                 while keys:
                     page = keys[:pageSize]
                     try:
-                        proc_states = table.list(filters=[["uuid", "in", page]]).execute(num_retries=self.num_retries)
+                        proc_states = table.list(filters=[["uuid", "in", page]], select=["uuid", "container_uuid", "state", "log_uuid",
+                                                                                         "output_uuid", "modified_at", "properties",
+                                                                                         "runtime_constraints"]).execute(num_retries=self.num_retries)
                     except Exception as e:
-                        logger.exception("Error checking states on API server: %s", e)
+                        logger.warning("Temporary error checking states on API server: %s", e)
                         remain_wait = self.poll_interval
                         continue
 
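
The select list above names the fields the poll loop goes on to use, now including runtime_constraints, so each page of results carries only what is needed. A hedged standalone sketch of the same paged query with the Arvados Python SDK (the UUID list and page size are placeholders):

    import arvados

    api = arvados.api("v1")
    keys = ["zzzzz-xvhdp-000000000000000"]  # placeholder container request UUIDs
    pageSize = 1000
    while keys:
        page, keys = keys[:pageSize], keys[pageSize:]
        proc_states = api.container_requests().list(
            filters=[["uuid", "in", page]],
            select=["uuid", "container_uuid", "state", "log_uuid",
                    "output_uuid", "modified_at", "properties",
                    "runtime_constraints"],
        ).execute(num_retries=3)
        for proc in proc_states["items"]:
            print(proc["uuid"], proc["state"], proc["runtime_constraints"])
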
@@ -593,6 +596,8 @@ The 'jobs' API is no longer supported.
     def arv_executor(self, updated_tool, job_order, runtimeContext, logger=None):
         self.debug = runtimeContext.debug
 
+        self.runtime_status_update("activity", "initialization")
+
         git_info = self.get_git_info(updated_tool) if self.git_info else {}
         if git_info:
             logger.info("Git provenance")
@@ -600,6 +605,8 @@ The 'jobs' API is no longer supported.
                 if git_info[g]:
                     logger.info("  %s: %s", g.split("#", 1)[1], git_info[g])
 
+        runtimeContext.git_info = git_info
+
         workbench1 = self.api.config()["Services"]["Workbench1"]["ExternalURL"]
         workbench2 = self.api.config()["Services"]["Workbench2"]["ExternalURL"]
         controller = self.api.config()["Services"]["Controller"]["ExternalURL"]
@@ -646,6 +653,10 @@ The 'jobs' API is no longer supported.
             runtimeContext.copy_deps = True
             runtimeContext.match_local_docker = True
 
+        if runtimeContext.print_keep_deps:
+            runtimeContext.copy_deps = False
+            runtimeContext.match_local_docker = False
+
         if runtimeContext.update_workflow and self.project_uuid is None:
             # If we are updating a workflow, make sure anything that
             # gets uploaded goes into the same parent project, unless
@@ -655,6 +666,8 @@ The 'jobs' API is no longer supported.
 
         self.project_uuid = runtimeContext.project_uuid
 
+        self.runtime_status_update("activity", "data transfer")
+
         # Upload local file references in the job order.
         with Perf(metrics, "upload_job_order"):
             job_order, jobmapper = upload_job_order(self, "%s input" % runtimeContext.name,
@@ -666,12 +679,10 @@ The 'jobs' API is no longer supported.
         # are going to wait for the result, and always_submit_runner
         # is false, then we don't submit a runner process.
 
-        submitting = (runtimeContext.update_workflow or
-                      runtimeContext.create_workflow or
-                      (runtimeContext.submit and not
+        submitting = (runtimeContext.submit and not
                        (updated_tool.tool["class"] == "CommandLineTool" and
                         runtimeContext.wait and
-                        not runtimeContext.always_submit_runner)))
+                        not runtimeContext.always_submit_runner))
 
         loadingContext = self.loadingContext.copy()
         loadingContext.do_validate = False
@@ -697,7 +708,7 @@ The 'jobs' API is no longer supported.
         loadingContext.skip_resolve_all = True
 
         workflow_wrapper = None
-        if submitting and not self.fast_submit:
+        if (submitting and not self.fast_submit) or runtimeContext.update_workflow or runtimeContext.create_workflow or runtimeContext.print_keep_deps:
             # upload workflow and get back the workflow wrapper
 
             workflow_wrapper = upload_workflow(self, tool, job_order,
@@ -720,6 +731,11 @@ The 'jobs' API is no longer supported.
                 self.stdout.write(uuid + "\n")
                 return (None, "success")
 
+            if runtimeContext.print_keep_deps:
+                # Just find and print out all the collection dependencies and exit
+                print_keep_deps(self, runtimeContext, merged_map, tool)
+                return (None, "success")
+
             # Did not register a workflow, we're going to submit
             # it instead.
             loadingContext.loader.idx.clear()
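
The print_keep_deps branch above reports the Keep collections a workflow depends on and exits without submitting anything. Once such a dependency is known, it can be resolved independently with the SDK; a hedged sketch using a placeholder portable data hash:

    import arvados

    api = arvados.api("v1")
    pdh = "d41d8cd98f00b204e9800998ecf8427e+0"  # placeholder portable data hash
    matches = api.collections().list(
        filters=[["portable_data_hash", "=", pdh]],
        select=["uuid", "name", "portable_data_hash"],
    ).execute()
    for c in matches["items"]:
        print(c["portable_data_hash"], c["name"])
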
@@ -823,6 +839,8 @@ The 'jobs' API is no longer supported.
         # We either running the workflow directly, or submitting it
         # and will wait for a final result.
 
+        self.runtime_status_update("activity", "workflow execution")
+
         current_container = arvados_cwl.util.get_current_container(self.api, self.num_retries, logger)
         if current_container:
             logger.info("Running inside container %s", current_container.get("uuid"))
@@ -860,7 +878,8 @@ The 'jobs' API is no longer supported.
                     if (self.task_queue.in_flight + len(self.processes)) > 0:
                         self.workflow_eval_lock.wait(3)
                     else:
-                        logger.error("Workflow is deadlocked, no runnable processes and not waiting on any pending processes.")
+                        if self.final_status is None:
+                            logger.error("Workflow is deadlocked, no runnable processes and not waiting on any pending processes.")
                         break
 
                 if self.stop_polling.is_set():