X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/8c8f10692679395046e76c5fc6c8d95733c71232..7fec33bab2fb68405a1c641d3cd956d21487e14b:/sdk/cwl/arvados_cwl/executor.py

diff --git a/sdk/cwl/arvados_cwl/executor.py b/sdk/cwl/arvados_cwl/executor.py
index 92894b36c9..330dba3dbe 100644
--- a/sdk/cwl/arvados_cwl/executor.py
+++ b/sdk/cwl/arvados_cwl/executor.py
@@ -34,7 +34,7 @@ from arvados.errors import ApiError
 
 import arvados_cwl.util
 from .arvcontainer import RunnerContainer, cleanup_name_for_collection
-from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps, make_builder, update_from_merged_map
+from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps, make_builder, update_from_merged_map, print_keep_deps
 from .arvtool import ArvadosCommandTool, validate_cluster_target, ArvadosExpressionTool
 from .arvworkflow import ArvadosWorkflow, upload_workflow, make_workflow_record
 from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver, CollectionCache, pdh_size
@@ -281,7 +281,7 @@ The 'jobs' API is no longer supported.
             runtime_status = current.get('runtime_status', {})
 
             original_updatemessage = updatemessage = runtime_status.get(kind, "")
-            if not updatemessage:
+            if kind == "activity" or not updatemessage:
                 updatemessage = message
 
             # Subsequent messages tacked on in detail
@@ -368,9 +368,10 @@ The 'jobs' API is no longer supported.
             while keys:
                 page = keys[:pageSize]
                 try:
-                    proc_states = table.list(filters=[["uuid", "in", page]]).execute(num_retries=self.num_retries)
+                    proc_states = table.list(filters=[["uuid", "in", page]], select=["uuid", "container_uuid", "state", "log_uuid",
+                                                                                     "output_uuid", "modified_at", "properties"]).execute(num_retries=self.num_retries)
                 except Exception as e:
-                    logger.exception("Error checking states on API server: %s", e)
+                    logger.warning("Temporary error checking states on API server: %s", e)
                     remain_wait = self.poll_interval
                     continue
 
@@ -648,6 +649,10 @@ The 'jobs' API is no longer supported.
             runtimeContext.copy_deps = True
             runtimeContext.match_local_docker = True
 
+        if runtimeContext.print_keep_deps:
+            runtimeContext.copy_deps = False
+            runtimeContext.match_local_docker = False
+
         if runtimeContext.update_workflow and self.project_uuid is None:
             # If we are updating a workflow, make sure anything that
             # gets uploaded goes into the same parent project, unless
@@ -670,12 +675,10 @@ The 'jobs' API is no longer supported.
         # are going to wait for the result, and always_submit_runner
         # is false, then we don't submit a runner process.
 
-        submitting = (runtimeContext.update_workflow or
-                      runtimeContext.create_workflow or
-                      (runtimeContext.submit and not
+        submitting = (runtimeContext.submit and not
                        (updated_tool.tool["class"] == "CommandLineTool" and
                         runtimeContext.wait and
-                        not runtimeContext.always_submit_runner)))
+                        not runtimeContext.always_submit_runner))
 
         loadingContext = self.loadingContext.copy()
         loadingContext.do_validate = False
@@ -701,7 +704,7 @@ The 'jobs' API is no longer supported.
             loadingContext.skip_resolve_all = True
 
             workflow_wrapper = None
-            if submitting and not self.fast_submit:
+            if (submitting and not self.fast_submit) or runtimeContext.update_workflow or runtimeContext.create_workflow or runtimeContext.print_keep_deps:
                 # upload workflow and get back the workflow wrapper
 
                 workflow_wrapper = upload_workflow(self, tool, job_order,
@@ -724,6 +727,11 @@ The 'jobs' API is no longer supported.
                 self.stdout.write(uuid + "\n")
                 return (None, "success")
 
+            if runtimeContext.print_keep_deps:
+                # Just find and print out all the collection dependencies and exit
+                print_keep_deps(tool)
+                return (None, "success")
+
             # Did not register a workflow, we're going to submit
             # it instead.
             loadingContext.loader.idx.clear()
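
Note on the new print_keep_deps code path added above: print_keep_deps is imported from .runner and its implementation is not part of this diff. As a rough, hypothetical sketch of the idea only (the names collect_keep_refs and print_keep_deps_sketch, the regex, and the JSON output format are assumptions, not the actual Arvados code), finding the collection dependencies of a packed CWL document could look like this:

    import json
    import re
    import sys

    # Keep references look like keep:<md5>+<size>[/path/inside/collection]
    KEEP_REF = re.compile(r"keep:([0-9a-f]{32}\+\d+)")

    def collect_keep_refs(obj, found):
        # Recursively walk a JSON-like CWL document and record every
        # portable data hash referenced via a keep: URI.
        if isinstance(obj, dict):
            for value in obj.values():
                collect_keep_refs(value, found)
        elif isinstance(obj, list):
            for value in obj:
                collect_keep_refs(value, found)
        elif isinstance(obj, str):
            found.update(KEEP_REF.findall(obj))

    def print_keep_deps_sketch(tool_document, stdout=sys.stdout):
        # Print the collection dependencies of a packed CWL document and return,
        # mirroring the "print and exit with success" behavior in the diff above.
        found = set()
        collect_keep_refs(tool_document, found)
        stdout.write(json.dumps(sorted(found)) + "\n")

    # Example usage with an in-memory document:
    # print_keep_deps_sketch({"inputs": [{"default": {"class": "File",
    #     "location": "keep:99999999999999999999999999999999+118/script.py"}}]})

Whatever the real helper does internally, the executor change itself is simple: when print_keep_deps is requested it disables copy_deps and match_local_docker, still uploads the workflow so dependencies are resolved, prints the collection dependencies, and returns (None, "success") without registering or submitting anything.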