         self.collection_cache = CollectionCache(self.api, self.keep_client, self.num_retries)
+        self.fetcher_constructor = partial(CollectionFetcher,
+                                           api_client=self.api,
+                                           fs_access=CollectionFsAccess("", collection_cache=self.collection_cache),
+                                           num_retries=self.num_retries)
+
         self.work_api = None
         expected_api = ["jobs", "containers"]
         for api in expected_api:
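
Hoisting the partial into __init__ gives every tool the same pre-bound fetcher factory (and one shared CollectionFsAccess over collection_cache) instead of building a fresh one per arv_make_tool call. A minimal sketch of the functools.partial mechanics this relies on (DemoFetcher is a hypothetical stand-in, not the real CollectionFetcher signature):

    from functools import partial

    class DemoFetcher:
        # Keyword arguments are pre-bound once; the positional ones are
        # supplied later by whoever finally constructs the fetcher.
        def __init__(self, cache, session, api_client=None, fs_access=None, num_retries=0):
            self.cache = cache
            self.session = session
            self.num_retries = num_retries

    fetcher_constructor = partial(DemoFetcher,
                                  api_client="api", fs_access="fs", num_retries=4)

    f = fetcher_constructor({}, None)   # only cache and session remain unbound
    assert f.num_retries == 4
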
     def arv_make_tool(self, toolpath_object, **kwargs):
         kwargs["work_api"] = self.work_api
-        kwargs["fetcher_constructor"] = partial(CollectionFetcher,
-                                                api_client=self.api,
-                                                fs_access=CollectionFsAccess("", collection_cache=self.collection_cache),
-                                                num_retries=self.num_retries)
+        kwargs["fetcher_constructor"] = self.fetcher_constructor
         kwargs["resolver"] = partial(collectionResolver, self.api, num_retries=self.num_retries)
         if "class" in toolpath_object and toolpath_object["class"] == "CommandLineTool":
             return ArvadosCommandTool(self, toolpath_object, **kwargs)
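
The consuming side belongs to cwltool, which constructs fetchers itself, so only the arguments it passes positionally can stay unbound. A hedged sketch of that call site (assumed convention, not copied from cwltool's source):

    # The loader that receives kwargs["fetcher_constructor"] is assumed to
    # invoke it with just (cache, session), which is why the partial above
    # pre-binds everything Arvados-specific.
    def load_document(fetcher_constructor, cache, session, url):
        fetcher = fetcher_constructor(cache, session)
        return fetcher.fetch_text(url)
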
         with self.workflow_eval_lock:
             self.processes[container.uuid] = container
-    def process_done(self, uuid):
+    def process_done(self, uuid, record):
         with self.workflow_eval_lock:
-            if uuid in self.processes:
-                del self.processes[uuid]
+            j = self.processes[uuid]
+            logger.info("%s %s is %s", self.label(j), uuid, record["state"])
+            self.task_queue.add(partial(j.done, record))
+            del self.processes[uuid]
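
process_done now holds workflow_eval_lock only for the dict bookkeeping; the potentially slow j.done(record) (API round-trips, output handling) is deferred to the task queue so the notifying thread returns quickly. A toy queue with the same add() contract (a stand-in, not arvados_cwl's TaskQueue):

    import queue
    import threading

    class ToyTaskQueue:
        # add() is cheap and thread-safe; the callable, e.g.
        # partial(j.done, record), runs later on a worker thread,
        # outside workflow_eval_lock.
        def __init__(self):
            self._tasks = queue.Queue()
            threading.Thread(target=self._worker, daemon=True).start()

        def add(self, task):
            self._tasks.put(task)

        def _worker(self):
            while True:
                self._tasks.get()()
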
     def wrapped_callback(self, cb, obj, st):
         with self.workflow_eval_lock:
                         j.update_pipeline_component(event["properties"]["new_attributes"])
                         logger.info("%s %s is Running", self.label(j), uuid)
             elif event["properties"]["new_attributes"]["state"] in ("Complete", "Failed", "Cancelled", "Final"):
-                with self.workflow_eval_lock:
-                    j = self.processes[uuid]
-                    self.task_queue.add(partial(j.done, event["properties"]["new_attributes"]))
-                    logger.info("%s %s is %s", self.label(j), uuid, event["properties"]["new_attributes"]["state"])
+                self.process_done(uuid, event["properties"]["new_attributes"])
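
Illustrative shape of the websocket event consumed here (the "state" key is read in the code above; "object_uuid", "event_type", and all values are assumptions for illustration):

    event = {
        "object_uuid": "zzzzz-dz642-000000000000000",
        "event_type": "update",
        "properties": {"new_attributes": {"state": "Complete"}},
    }
    # A terminal state now funnels through a single call:
    # self.process_done(event["object_uuid"], event["properties"]["new_attributes"])
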
     def label(self, obj):
         return "[%s %s]" % (self.work_api[0:-1], obj.name)