from cwltool.errors import WorkflowException
import cwltool.workflow
-from schema_salad.sourceline import SourceLine
+from schema_salad.sourceline import SourceLine, cmap
import schema_salad.validate as validate
from schema_salad.ref_resolver import file_uri, uri_file_path
from .arvcontainer import RunnerContainer, cleanup_name_for_collection
from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps, make_builder
from .arvtool import ArvadosCommandTool, validate_cluster_target, ArvadosExpressionTool
-from .arvworkflow import ArvadosWorkflow, upload_workflow
+from .arvworkflow import ArvadosWorkflow, upload_workflow, new_upload_workflow, make_workflow_record
from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver, CollectionCache, pdh_size
from .perf import Perf
from .pathmapper import NoFollowPathMapper
kind = 'error'
elif record.levelno >= logging.WARNING:
kind = 'warning'
+ if kind == 'warning' and record.name == "salad":
+ # Don't send validation warnings to runtime status,
+ # they're noisy and unhelpful.
+ return
if kind is not None and self.updatingRuntimeStatus is not True:
self.updatingRuntimeStatus = True
try:
arvargs.output_tags = None
arvargs.thread_count = 1
arvargs.collection_cache_size = None
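+ # These defaults appear to apply when no parsed command line
+ # arguments were supplied (e.g. library use).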
+ arvargs.git_info = True
+ arvargs.submit = False
+ arvargs.defer_downloads = False
self.api = api_client
self.processes = {}
self.fs_access = None
self.secret_store = None
self.stdout = stdout
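+ # When fast_submit is true, validation and dependency upload steps
+ # that have already run are skipped (see arv_executor below).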
+ self.fast_submit = False
+ self.git_info = arvargs.git_info
if keep_client is not None:
self.keep_client = keep_client
self.toplevel_runtimeContext.make_fs_access = partial(CollectionFsAccess,
collection_cache=self.collection_cache)
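+ # Only defer downloads when actually submitting, presumably so the
+ # remote runner can fetch the files itself.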
+ self.defer_downloads = arvargs.submit and arvargs.defer_downloads
+
validate_cluster_target(self, self.toplevel_runtimeContext)
page = keys[:pageSize]
try:
proc_states = table.list(filters=[["uuid", "in", page]]).execute(num_retries=self.num_retries)
- except Exception:
- logger.exception("Error checking states on API server: %s")
+ except Exception as e:
+ logger.exception("Error checking states on API server: %s", e)
remain_wait = self.poll_interval
continue
git_commit = subprocess.run(["git", "log", "--format=%H", "-n1", "HEAD"], cwd=cwd, capture_output=True, text=True).stdout
git_date = subprocess.run(["git", "log", "--format=%cD", "-n1", "HEAD"], cwd=cwd, capture_output=True, text=True).stdout
git_committer = subprocess.run(["git", "log", "--format=%cn <%ce>", "-n1", "HEAD"], cwd=cwd, capture_output=True, text=True).stdout
- git_branch = subprocess.run(["git", "branch", "--show-current"], cwd=cwd, capture_output=True, text=True).stdout
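+ # "git branch --show-current" requires Git 2.22+; rev-parse
+ # --abbrev-ref also works on older versions.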
+ git_branch = subprocess.run(["git", "rev-parse", "--abbrev-ref", "HEAD"], cwd=cwd, capture_output=True, text=True).stdout
git_origin = subprocess.run(["git", "remote", "get-url", "origin"], cwd=cwd, capture_output=True, text=True).stdout
git_status = subprocess.run(["git", "status", "--untracked-files=no", "--porcelain"], cwd=cwd, capture_output=True, text=True).stdout
- git_describe = subprocess.run(["git", "describe", "--always"], cwd=cwd, capture_output=True, text=True).stdout
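+ # --tags makes describe consider lightweight (unannotated) tags too.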
+ git_describe = subprocess.run(["git", "describe", "--always", "--tags"], cwd=cwd, capture_output=True, text=True).stdout
git_toplevel = subprocess.run(["git", "rev-parse", "--show-toplevel"], cwd=cwd, capture_output=True, text=True).stdout
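+ # git_toplevel ends with a newline, so the slice below also drops the
+ # path separator between the top level directory and the file path.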
git_path = filepath[len(git_toplevel):]
def arv_executor(self, updated_tool, job_order, runtimeContext, logger=None):
self.debug = runtimeContext.debug
- git_info = self.get_git_info(updated_tool)
+ git_info = self.get_git_info(updated_tool) if self.git_info else {}
if git_info:
logger.info("Git provenance")
for g in git_info:
controller = self.api.config()["Services"]["Controller"]["ExternalURL"]
logger.info("Using cluster %s (%s)", self.api.config()["ClusterID"], workbench2 or workbench1 or controller)
- updated_tool.visit(self.check_features)
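+ # On a fast resubmit the features are assumed to have already been
+ # checked, so skip the walk.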
+ if not self.fast_submit:
+ updated_tool.visit(self.check_features)
self.pipeline = None
self.fs_access = runtimeContext.make_fs_access(runtimeContext.basedir)
loadingContext = self.loadingContext.copy()
loadingContext.do_validate = False
loadingContext.disable_js_validation = True
- if submitting:
- loadingContext.do_update = False
- # Document may have been auto-updated. Reload the original
- # document with updating disabled because we want to
- # submit the document with its original CWL version, not
- # the auto-updated one.
- with Perf(metrics, "load_tool original"):
- tool = load_tool(updated_tool.tool["id"], loadingContext)
- else:
- tool = updated_tool
+ tool = updated_tool
# Upload direct dependencies of workflow steps, get back mapping of files to keep references.
# Also uploads docker images.
- logger.info("Uploading workflow dependencies")
- with Perf(metrics, "upload_workflow_deps"):
- merged_map = upload_workflow_deps(self, tool, runtimeContext)
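+ # On a fast resubmit the dependencies are assumed to have already
+ # been uploaded, so fall back to an empty mapping.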
+ if not self.fast_submit:
+ logger.info("Uploading workflow dependencies")
+ with Perf(metrics, "upload_workflow_deps"):
+ merged_map = upload_workflow_deps(self, tool, runtimeContext)
+ else:
+ merged_map = {}
# Recreate process object (ArvadosWorkflow or
# ArvadosCommandTool) because tool document may have been
# updated by upload_workflow_deps in ways that modify
- # inheritance of hints or requirements.
+ # hints or requirements.
loadingContext.loader = tool.doc_loader
loadingContext.avsc_names = tool.doc_schema
loadingContext.metadata = tool.metadata
- with Perf(metrics, "load_tool"):
- tool = load_tool(tool.tool, loadingContext)
-
- if runtimeContext.update_workflow or runtimeContext.create_workflow:
- # Create a pipeline template or workflow record and exit.
- if self.work_api == "containers":
- uuid = upload_workflow(self, tool, job_order,
- runtimeContext.project_uuid,
- runtimeContext,
- uuid=runtimeContext.update_workflow,
- submit_runner_ram=runtimeContext.submit_runner_ram,
- name=runtimeContext.name,
- merged_map=merged_map,
- submit_runner_image=runtimeContext.submit_runner_image,
- git_info=git_info)
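+ # skip_resolve_all appears to tell the loader that the document has
+ # already been resolved, avoiding a second resolve_all pass.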
+ loadingContext.skip_resolve_all = True
+
+ if submitting and not self.fast_submit:
+ # Upload the workflow and get back the wrapper workflow.
+ workflow_wrapper = new_upload_workflow(self, tool, job_order,
+ runtimeContext.project_uuid,
+ runtimeContext,
+ uuid=runtimeContext.update_workflow,
+ submit_runner_ram=runtimeContext.submit_runner_ram,
+ name=runtimeContext.name,
+ merged_map=merged_map,
+ submit_runner_image=runtimeContext.submit_runner_image,
+ git_info=git_info,
+ set_defaults=(runtimeContext.update_workflow or runtimeContext.create_workflow))
+
+ if runtimeContext.update_workflow or runtimeContext.create_workflow:
+ # Now create a workflow record and exit.
+ uuid = make_workflow_record(self, workflow_wrapper, runtimeContext.name, tool,
+ runtimeContext.project_uuid, runtimeContext.update_workflow)
self.stdout.write(uuid + "\n")
return (None, "success")
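+ # Register the wrapper under the synthetic id "_:main" in the loader
+ # index so that loading it below resolves references in-memory.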
+ loadingContext.loader.idx.clear()
+ loadingContext.loader.idx["_:main"] = workflow_wrapper
+ workflow_wrapper["id"] = "_:main"
+
+ # Reload just the wrapper workflow.
+ self.fast_submit = True
+ #print("bah bah", loadingContext.requirements)
+ #workflow_wrapper, _ = loadingContext.loader.resolve_all(cmap(workflow_wrapper), "_:main", checklinks=True)
+
+ #tool = load_tool(workflow_wrapper[0], loadingContext)
+ #print(json.dumps(workflow_wrapper, indent=2))
+ tool = load_tool(workflow_wrapper, loadingContext)
+
+
self.apply_reqs(job_order, tool)
self.ignore_docker_for_reuse = runtimeContext.ignore_docker_for_reuse