projects
/
arvados.git
/ blobdiff
summary
|
shortlog
|
log
|
commit
|
commitdiff
|
tree
raw
|
inline
| side by side
Merge branch 'master' into 7478-anm-spot-instances
[arvados.git]
/
sdk
/
cwl
/
arvados_cwl
/
__init__.py
diff --git a/sdk/cwl/arvados_cwl/__init__.py b/sdk/cwl/arvados_cwl/__init__.py
index 12762118e62bc9fa1e2b8cc656056fbb60b01612..d509f400f1058396f2fc91e6ef320a2bbebe92e1 100644 (file)
--- a/sdk/cwl/arvados_cwl/__init__.py
+++ b/sdk/cwl/arvados_cwl/__init__.py
@@ -99,6 +99,11 @@ class ArvCwlRunner(object):
         self.collection_cache = CollectionCache(self.api, self.keep_client, self.num_retries)
+ self.fetcher_constructor = partial(CollectionFetcher,
+ api_client=self.api,
+ fs_access=CollectionFsAccess("", collection_cache=self.collection_cache),
+ num_retries=self.num_retries)
+
         self.work_api = None
         expected_api = ["jobs", "containers"]
         for api in expected_api:
@@ -119,10 +124,7 @@ class ArvCwlRunner(object):
     def arv_make_tool(self, toolpath_object, **kwargs):
         kwargs["work_api"] = self.work_api
- kwargs["fetcher_constructor"] = partial(CollectionFetcher,
- api_client=self.api,
- fs_access=CollectionFsAccess("", collection_cache=self.collection_cache),
- num_retries=self.num_retries)
+ kwargs["fetcher_constructor"] = self.fetcher_constructor
         kwargs["resolver"] = partial(collectionResolver, self.api, num_retries=self.num_retries)
         if "class" in toolpath_object and toolpath_object["class"] == "CommandLineTool":
             return ArvadosCommandTool(self, toolpath_object, **kwargs)
@@ -155,10 +157,12 @@ class ArvCwlRunner(object):
         with self.workflow_eval_lock:
             self.processes[container.uuid] = container
-    def process_done(self, uuid):
+    def process_done(self, uuid, record):
         with self.workflow_eval_lock:
- if uuid in self.processes:
- del self.processes[uuid]
+ j = self.processes[uuid]
+ logger.info("%s %s is %s", self.label(j), uuid, record["state"])
+ self.task_queue.add(partial(j.done, record))
+ del self.processes[uuid]
     def wrapped_callback(self, cb, obj, st):
         with self.workflow_eval_lock:
@@ -179,10 +183,7 @@ class ArvCwlRunner(object):
                 j.update_pipeline_component(event["properties"]["new_attributes"])
                 logger.info("%s %s is Running", self.label(j), uuid)
             elif event["properties"]["new_attributes"]["state"] in ("Complete", "Failed", "Cancelled", "Final"):
- with self.workflow_eval_lock:
- j = self.processes[uuid]
- self.task_queue.add(partial(j.done, event["properties"]["new_attributes"]))
- logger.info("%s %s is %s", self.label(j), uuid, event["properties"]["new_attributes"]["state"])
+ self.process_done(uuid, event["properties"]["new_attributes"])
     def label(self, obj):
         return "[%s %s]" % (self.work_api[0:-1], obj.name)