from .arvcontainer import ArvadosContainer, RunnerContainer
from .arvjob import ArvadosJob, RunnerJob, RunnerTemplate
-from. runner import Runner, upload_docker, upload_job_order, upload_workflow_deps, upload_dependencies
+from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps
from .arvtool import ArvadosCommandTool
from .arvworkflow import ArvadosWorkflow, upload_workflow
from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver, CollectionCache
self.workflow_eval_lock = threading.Condition(threading.RLock())
self.final_output = None
self.final_status = None
- self.uploaded = {}
self.num_retries = num_retries
self.uuid = None
self.stop_polling = threading.Event()
with self.workflow_eval_lock:
self.processes[container.uuid] = container
- def process_done(self, uuid):
+ def process_done(self, uuid, record):
with self.workflow_eval_lock:
- if uuid in self.processes:
- del self.processes[uuid]
+ j = self.processes[uuid]
+ logger.info("%s %s is %s", self.label(j), uuid, record["state"])
+ self.task_queue.add(partial(j.done, record))
+ del self.processes[uuid]
def wrapped_callback(self, cb, obj, st):
with self.workflow_eval_lock:
j.update_pipeline_component(event["properties"]["new_attributes"])
logger.info("%s %s is Running", self.label(j), uuid)
elif event["properties"]["new_attributes"]["state"] in ("Complete", "Failed", "Cancelled", "Final"):
- with self.workflow_eval_lock:
- j = self.processes[uuid]
- self.task_queue.add(partial(j.done, event["properties"]["new_attributes"]))
- logger.info("%s %s is %s", self.label(j), uuid, event["properties"]["new_attributes"]["state"])
+ self.process_done(uuid, event["properties"]["new_attributes"])
def label(self, obj):
return "[%s %s]" % (self.work_api[0:-1], obj.name)
finally:
self.stop_polling.set()
- def get_uploaded(self):
- return self.uploaded.copy()
-
- def add_uploaded(self, src, pair):
- self.uploaded[src] = pair
-
def add_intermediate_output(self, uuid):
if uuid:
self.intermediate_output_collections.append(uuid)
if self.intermediate_output_ttl < 0:
raise Exception("Invalid value %d for --intermediate-output-ttl, cannot be less than zero" % self.intermediate_output_ttl)
+ if kwargs.get("submit_request_uuid") and self.work_api != "containers":
+ raise Exception("--submit-request-uuid requires containers API, but using '{}' api".format(self.work_api))
+
if not kwargs.get("name"):
kwargs["name"] = self.name = tool.tool.get("label") or tool.metadata.get("label") or os.path.basename(tool.tool["id"])
help="Docker image for workflow runner job, default arvados/jobs:%s" % __version__,
default=None)
+ parser.add_argument("--submit-request-uuid", type=str,
+ default=None,
+ help="Update and commit supplied container request instead of creating a new one (containers API only).")
+
parser.add_argument("--name", type=str,
help="Name to use for workflow execution instance.",
default=None)