X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/d4bb1f8a3c68288f45d2c1fa618c333825fcc8d0..5c9aedbd8e623a518f5e1a92e4064a25ddd66353:/sdk/cwl/arvados_cwl/__init__.py

diff --git a/sdk/cwl/arvados_cwl/__init__.py b/sdk/cwl/arvados_cwl/__init__.py
index 27af075f36..cd38003dae 100644
--- a/sdk/cwl/arvados_cwl/__init__.py
+++ b/sdk/cwl/arvados_cwl/__init__.py
@@ -15,15 +15,18 @@ import pkg_resources  # part of setuptools
 from cwltool.errors import WorkflowException
 import cwltool.main
 import cwltool.workflow
+import schema_salad
 
 import arvados
-import arvados.events
 import arvados.config
 
 from .arvcontainer import ArvadosContainer, RunnerContainer
 from .arvjob import ArvadosJob, RunnerJob, RunnerTemplate
 from .arvtool import ArvadosCommandTool
+from .arvworkflow import ArvadosWorkflow, upload_workflow
 from .fsaccess import CollectionFsAccess
+from .perf import Perf
+from cwltool.pack import pack
 
 from cwltool.process import shortname, UnsupportedRequirement
 from cwltool.pathmapper import adjustFileObjs
@@ -50,6 +53,9 @@ class ArvCwlRunner(object):
         self.num_retries = 4
         self.uuid = None
         self.work_api = work_api
+        self.stop_polling = threading.Event()
+        self.poll_api = None
+        self.pipeline = None
 
         if self.work_api is None:
             # todo: autodetect API to use.
@@ -58,9 +64,12 @@ class ArvCwlRunner(object):
         if self.work_api not in ("containers", "jobs"):
             raise Exception("Unsupported API '%s'" % self.work_api)
 
-    def arvMakeTool(self, toolpath_object, **kwargs):
+    def arv_make_tool(self, toolpath_object, **kwargs):
+        kwargs["work_api"] = self.work_api
         if "class" in toolpath_object and toolpath_object["class"] == "CommandLineTool":
-            return ArvadosCommandTool(self, toolpath_object, work_api=self.work_api, **kwargs)
+            return ArvadosCommandTool(self, toolpath_object, **kwargs)
+        elif "class" in toolpath_object and toolpath_object["class"] == "Workflow":
+            return ArvadosWorkflow(self, toolpath_object, **kwargs)
         else:
             return cwltool.workflow.defaultMakeTool(toolpath_object, **kwargs)
 
@@ -94,11 +103,47 @@ class ArvCwlRunner(object):
                         self.cond.acquire()
                         j = self.processes[uuid]
                         logger.info("Job %s (%s) is %s", j.name, uuid, event["properties"]["new_attributes"]["state"])
-                        j.done(event["properties"]["new_attributes"])
+                        with Perf(logger, "done %s" % j.name):
+                            j.done(event["properties"]["new_attributes"])
                         self.cond.notify()
                     finally:
                         self.cond.release()
 
+    def poll_states(self):
+        """Poll status of jobs or containers listed in the processes dict.
+
+        Runs in a separate thread.
+        """
+
+        while True:
+            self.stop_polling.wait(15)
+            if self.stop_polling.is_set():
+                break
+            with self.lock:
+                keys = self.processes.keys()
+            if not keys:
+                continue
+
+            if self.work_api == "containers":
+                table = self.poll_api.containers()
+            elif self.work_api == "jobs":
+                table = self.poll_api.jobs()
+
+            try:
+                proc_states = table.list(filters=[["uuid", "in", keys]]).execute(num_retries=self.num_retries)
+            except Exception as e:
+                logger.warn("Error checking states on API server: %s", e)
+                continue
+
+            for p in proc_states["items"]:
+                self.on_message({
+                    "object_uuid": p["uuid"],
+                    "event_type": "update",
+                    "properties": {
+                        "new_attributes": p
+                    }
+                })
+
     def get_uploaded(self):
         return self.uploaded.copy()
 
@@ -115,15 +160,11 @@ class ArvCwlRunner(object):
             for v in obj:
                 self.check_writable(v)
 
-    def arvExecutor(self, tool, job_order, **kwargs):
+    def arv_executor(self, tool, job_order, **kwargs):
         self.debug = kwargs.get("debug")
 
         tool.visit(self.check_writable)
 
-        if kwargs.get("quiet"):
-            logger.setLevel(logging.WARN)
-            logging.getLogger('arvados.arv-run').setLevel(logging.WARN)
-
         useruuid = self.api.users().current().execute()["uuid"]
         self.project_uuid = kwargs.get("project_uuid") if kwargs.get("project_uuid") else useruuid
         self.pipeline = None
@@ -136,7 +177,9 @@ class ArvCwlRunner(object):
             # cwltool.main will write our return value to stdout.
             return tmpl.uuid
 
-        self.debug = kwargs.get("debug")
+        if kwargs.get("create_workflow") or kwargs.get("update_workflow"):
+            return upload_workflow(self, tool, job_order, self.project_uuid, kwargs.get("update_workflow"))
+
         self.ignore_docker_for_reuse = kwargs.get("ignore_docker_for_reuse")
 
         kwargs["make_fs_access"] = make_fs_access
@@ -182,12 +225,9 @@ class ArvCwlRunner(object):
             runnerjob.run()
             return runnerjob.uuid
 
-        arvados.config.settings()["ARVADOS_DISABLE_WEBSOCKETS"] = "1"
-
-        if self.work_api == "containers":
-            events = arvados.events.subscribe(arvados.api('v1'), [["object_uuid", "is_a", "arvados#container"]], self.on_message)
-        if self.work_api == "jobs":
-            events = arvados.events.subscribe(arvados.api('v1'), [["object_uuid", "is_a", "arvados#job"]], self.on_message)
+        self.poll_api = arvados.api('v1')
+        self.polling_thread = threading.Thread(target=self.poll_states)
+        self.polling_thread.start()
 
         if runnerjob:
             jobiter = iter((runnerjob,))
@@ -206,7 +246,8 @@ class ArvCwlRunner(object):
 
                 for runnable in jobiter:
                     if runnable:
-                        runnable.run(**kwargs)
+                        with Perf(logger, "run"):
+                            runnable.run(**kwargs)
                     else:
                         if self.processes:
                             self.cond.wait(1)
@@ -217,7 +258,6 @@ class ArvCwlRunner(object):
                 while self.processes:
                     self.cond.wait(1)
 
-                events.close()
             except UnsupportedRequirement:
                 raise
             except:
@@ -233,6 +273,8 @@ class ArvCwlRunner(object):
                                                      body={"priority": "0"}).execute(num_retries=self.num_retries)
         finally:
             self.cond.release()
+            self.stop_polling.set()
+            self.polling_thread.join()
 
         if self.final_status == "UnsupportedRequirement":
             raise UnsupportedRequirement("Check log for details.")
@@ -290,7 +332,7 @@ def arg_parser():  # type: () -> argparse.ArgumentParser
                         default=True, dest="enable_reuse",
                         help="")
 
-    parser.add_argument("--project-uuid", type=str, help="Project that will own the workflow jobs, if not provided, will go to home project.")
+    parser.add_argument("--project-uuid", type=str, metavar="UUID", help="Project that will own the workflow jobs, if not provided, will go to home project.")
     parser.add_argument("--ignore-docker-for-reuse", action="store_true",
                         help="Ignore Docker image version when deciding whether to reuse past jobs.",
                         default=False)
@@ -301,6 +343,8 @@ def arg_parser():  # type: () -> argparse.ArgumentParser
     exgroup.add_argument("--local", action="store_false", help="Run workflow on local host (submits jobs to Arvados).",
                         default=True, dest="submit")
     exgroup.add_argument("--create-template", action="store_true", help="Create an Arvados pipeline template.")
+    exgroup.add_argument("--create-workflow", action="store_true", help="Create an Arvados workflow.")
+    exgroup.add_argument("--update-workflow", type=str, metavar="UUID", help="Update existing Arvados workflow with uuid.")
 
     exgroup = parser.add_mutually_exclusive_group()
     exgroup.add_argument("--wait", action="store_true", help="After submitting workflow runner job, wait for completion.",
@@ -321,15 +365,27 @@ def arg_parser():  # type: () -> argparse.ArgumentParser
 
     return parser
 
+def add_arv_hints():
+    cache = {}
+    res = pkg_resources.resource_stream(__name__, 'arv-cwl-schema.yml')
+    cache["http://arvados.org/cwl"] = res.read()
+    res.close()
+    _, cwlnames, _, _ = cwltool.process.get_schema("v1.0")
+    _, extnames, _, _ = schema_salad.schema.load_schema("http://arvados.org/cwl", cache=cache)
+    for n in extnames.names:
+        if not cwlnames.has_name("http://arvados.org/cwl#"+n, ""):
+            cwlnames.add_name("http://arvados.org/cwl#"+n, "", extnames.get_name(n, ""))
 
 def main(args, stdout, stderr, api_client=None):
     parser = arg_parser()
     job_order_object = None
     arvargs = parser.parse_args(args)
 
-    if arvargs.create_template and not arvargs.job_order:
+    if (arvargs.create_template or arvargs.create_workflow or arvargs.update_workflow) and not arvargs.job_order:
         job_order_object = ({}, "")
 
+    add_arv_hints()
+
     try:
         if api_client is None:
             api_client=arvados.api('v1', model=OrderedJsonModel())
@@ -338,14 +394,21 @@ def main(args, stdout, stderr, api_client=None):
         logger.error(e)
         return 1
 
+    if arvargs.debug:
+        logger.setLevel(logging.DEBUG)
+
+    if arvargs.quiet:
+        logger.setLevel(logging.WARN)
+        logging.getLogger('arvados.arv-run').setLevel(logging.WARN)
+
     arvargs.conformance_test = None
    arvargs.use_container = True
 
     return cwltool.main.main(args=arvargs,
                              stdout=stdout,
                              stderr=stderr,
-                             executor=runner.arvExecutor,
-                             makeTool=runner.arvMakeTool,
+                             executor=runner.arv_executor,
+                             makeTool=runner.arv_make_tool,
                              versionfunc=versionstring,
                              job_order_object=job_order_object,
                              make_fs_access=partial(CollectionFsAccess, api_client=api_client))
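The hunks above replace the websocket-based arvados.events.subscribe() calls with a polling thread (ArvCwlRunner.poll_states) that periodically lists the watched jobs or containers and feeds each record back through on_message(), and they shut that thread down with stop_polling.set() and polling_thread.join(). The sketch below is a minimal, standalone illustration of that polling pattern under stated assumptions; it is not arvados-cwl-runner code, and the names StateWatcher, fetch_states and handle_update are hypothetical stand-ins for the API-specific pieces.

import threading

class StateWatcher(object):
    """Minimal sketch of a poll-and-dispatch loop like poll_states()."""

    def __init__(self, fetch_states, handle_update, interval=15):
        self.fetch_states = fetch_states    # callable: list of uuids -> iterable of state records
        self.handle_update = handle_update  # callable: one state record -> None
        self.interval = interval            # seconds between polls
        self.watched = set()                # uuids to poll, guarded by self.lock
        self.lock = threading.Lock()
        self.stop_polling = threading.Event()
        self.thread = threading.Thread(target=self._poll)

    def start(self):
        self.thread.start()

    def stop(self):
        # Mirrors the stop_polling.set() / polling_thread.join() shutdown in the diff.
        self.stop_polling.set()
        self.thread.join()

    def _poll(self):
        while True:
            # wait() doubles as the sleep between polls and the shutdown signal.
            self.stop_polling.wait(self.interval)
            if self.stop_polling.is_set():
                break
            with self.lock:
                keys = list(self.watched)
            if not keys:
                continue
            try:
                records = self.fetch_states(keys)
            except Exception as e:
                # A failed poll is reported and retried on the next cycle.
                print("error checking states: %s" % e)
                continue
            for rec in records:
                self.handle_update(rec)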