updated_tool.visit(self.check_features)
- self.project_uuid = runtimeContext.project_uuid
self.pipeline = None
self.fs_access = runtimeContext.make_fs_access(runtimeContext.basedir)
self.secret_store = runtimeContext.secret_store
# gets uploaded goes into the same parent project, unless
# an alternate --project-uuid was provided.
existing_wf = self.api.workflows().get(uuid=runtimeContext.update_workflow).execute()
- self.project_uuid = existing_wf["owner_uuid"]
+ runtimeContext.project_uuid = existing_wf["owner_uuid"]
+
+ self.project_uuid = runtimeContext.project_uuid
# Upload local file references in the job order.
job_order = upload_job_order(self, "%s input" % runtimeContext.name,
# Create a pipeline template or workflow record and exit.
if self.work_api == "containers":
uuid = upload_workflow(self, tool, job_order,
- self.project_uuid,
+ runtimeContext.project_uuid,
runtimeContext,
uuid=runtimeContext.update_workflow,
submit_runner_ram=runtimeContext.submit_runner_ram,
raise Exception("Docker image %s is not available\n%s" % (img, e) )
-def upload_workflow_collection(arvrunner, name, packed):
+def upload_workflow_collection(arvrunner, name, packed, runtimeContext):
collection = arvados.collection.Collection(api_client=arvrunner.api,
keep_client=arvrunner.keep_client,
num_retries=arvrunner.num_retries)
filters = [["portable_data_hash", "=", collection.portable_data_hash()],
["name", "like", name+"%"]]
- if arvrunner.project_uuid:
- filters.append(["owner_uuid", "=", arvrunner.project_uuid])
+ if runtimeContext.project_uuid:
+ filters.append(["owner_uuid", "=", runtimeContext.project_uuid])
exists = arvrunner.api.collections().list(filters=filters).execute(num_retries=arvrunner.num_retries)
if exists["items"]:
logger.info("Using collection %s", exists["items"][0]["uuid"])
else:
collection.save_new(name=name,
- owner_uuid=arvrunner.project_uuid,
+ owner_uuid=runtimeContext.project_uuid,
ensure_unique_name=True,
num_retries=arvrunner.num_retries)
logger.info("Uploaded to %s", collection.manifest_locator())
def check_contents(group, wf_uuid):
contents = api.groups().contents(uuid=group["uuid"]).execute()
if len(contents["items"]) != 3:
- raise Exception("Expected 3 items")
+ raise Exception("Expected 3 items in "+group["uuid"]+" was "+str(len(contents["items"])))
found = False
for c in contents["items"]:
if c["kind"] == "arvados#workflow" and c["uuid"] == wf_uuid:
found = True
if not found:
- raise Exception("Couldn't find workflow")
+ raise Exception("Couldn't find workflow in "+group["uuid"])
found = False
for c in contents["items"]:
raise Exception("Expected 0 items")
# Create workflow, by default should also copy dependencies
- wf_uuid = subprocess.check_output(["arvados-cwl-runner", "--create-workflow", "--project-uuid", group["uuid"], "19070-copy-deps.cwl"])
+ cmd = ["arvados-cwl-runner", "--create-workflow", "--project-uuid", group["uuid"], "19070-copy-deps.cwl"]
+ print(" ".join(cmd))
+ wf_uuid = subprocess.check_output(cmd)
wf_uuid = wf_uuid.decode("utf-8").strip()
check_contents(group, wf_uuid)
finally:
raise Exception("Expected 0 items")
# Create workflow, but with --no-copy-deps it shouldn't copy anything
- wf_uuid = subprocess.check_output(["arvados-cwl-runner", "--no-copy-deps", "--create-workflow", "--project-uuid", group["uuid"], "19070-copy-deps.cwl"])
+ cmd = ["arvados-cwl-runner", "--no-copy-deps", "--create-workflow", "--project-uuid", group["uuid"], "19070-copy-deps.cwl"]
+ print(" ".join(cmd))
+ wf_uuid = subprocess.check_output(cmd)
wf_uuid = wf_uuid.decode("utf-8").strip()
contents = api.groups().contents(uuid=group["uuid"]).execute()
raise Exception("Couldn't find workflow")
# Updating by default will copy missing items
- wf_uuid = subprocess.check_output(["arvados-cwl-runner", "--update-workflow", wf_uuid, "19070-copy-deps.cwl"])
+ cmd = ["arvados-cwl-runner", "--update-workflow", wf_uuid, "19070-copy-deps.cwl"]
+ print(" ".join(cmd))
+ wf_uuid = subprocess.check_output(cmd)
wf_uuid = wf_uuid.decode("utf-8").strip()
check_contents(group, wf_uuid)
raise Exception("Expected 0 items")
# Execute workflow, shouldn't copy anything.
- wf_uuid = subprocess.check_output(["arvados-cwl-runner", "--project-uuid", group["uuid"], "19070-copy-deps.cwl"])
+ cmd = ["arvados-cwl-runner", "--project-uuid", group["uuid"], "19070-copy-deps.cwl"]
+ print(" ".join(cmd))
+ wf_uuid = subprocess.check_output(cmd)
wf_uuid = wf_uuid.decode("utf-8").strip()
contents = api.groups().contents(uuid=group["uuid"]).execute()
raise Exception("Didn't expect to find jobs image dependency")
# Execute workflow with --copy-deps
- wf_uuid = subprocess.check_output(["arvados-cwl-runner", "--project-uuid", group["uuid"], "--copy-deps", "19070-copy-deps.cwl"])
+ cmd = ["arvados-cwl-runner", "--project-uuid", group["uuid"], "--copy-deps", "19070-copy-deps.cwl"]
+ print(" ".join(cmd))
+ wf_uuid = subprocess.check_output(cmd)
wf_uuid = wf_uuid.decode("utf-8").strip()
contents = api.groups().contents(uuid=group["uuid"]).execute()