owner_uuid: @object.uuid
}
})
- redirect_to root_url(api_token: resp[:api_token])
+ redirect_to root_url(api_token: "v2/#{resp[:uuid]}/#{resp[:api_token]}")
end
def home
attrs['state'] = "Uncommitted"
# required
- attrs['command'] = ["arvados-cwl-runner",
- "--local",
- "--api=containers",
- "--project-uuid=#{params['work_unit']['owner_uuid']}",
- "/var/lib/cwl/workflow.json#main",
- "/var/lib/cwl/cwl.input.json"]
attrs['container_image'] = "arvados/jobs"
attrs['cwd'] = "/var/spool/cwl"
attrs['output_path'] = "/var/spool/cwl"
"API" => true
}
+ keep_cache = 256
input_defaults = {}
if wf_json
main = get_cwl_main(wf_json)
if hint[:ramMin]
runtime_constraints["ram"] = hint[:ramMin] * 1024 * 1024
end
+ if hint[:keep_cache]
+ keep_cache = hint[:keep_cache]
+ end
end
end
end
end
+ attrs['command'] = ["arvados-cwl-runner",
+ "--local",
+ "--api=containers",
+ "--project-uuid=#{params['work_unit']['owner_uuid']}",
+ "--collection-keep-cache=#{keep_cache}",
+ "/var/lib/cwl/workflow.json#main",
+ "/var/lib/cwl/cwl.input.json"]
+
# mounts
mounts = {
"/var/lib/cwl/cwl.input.json" => {
assert_match /\/users\/welcome/, @response.redirect_url
end
+ test "'log in as user' feature uses a v2 token" do
+ post :sudo, {
+ id: api_fixture('users')['active']['uuid']
+ }, session_for('admin_trustedclient')
+ assert_response :redirect
+ assert_match /api_token=v2%2F/, @response.redirect_url
+ end
+
test "request shell access" do
user = api_fixture('users')['spectator']
#
# SPDX-License-Identifier: AGPL-3.0
+set -e
+
+arvados-cwl-runner --version
+
exec python <<EOF
import arvados_cwl
print "arvados-cwl-runner version", arvados_cwl.__version__
# clean up the docker build environment
cd "$WORKSPACE"
+if [[ -z "$ARVADOS_BUILDING_VERSION" ]] && ! [[ -z "$version_tag" ]]; then
+ ARVADOS_BUILDING_VERSION="$version_tag"
+ ARVADOS_BUILDING_ITERATION="1"
+fi
+
python_sdk_ts=$(cd sdk/python && timestamp_from_git)
cwl_runner_ts=$(cd sdk/cwl && timestamp_from_git)
echo cwl_runner_version $cwl_runner_version python_sdk_version $python_sdk_version
+if [[ "${python_sdk_version}" != "${ARVADOS_BUILDING_VERSION}" ]]; then
+ python_sdk_version="${python_sdk_version}-2"
+else
+ python_sdk_version="${ARVADOS_BUILDING_VERSION}-${ARVADOS_BUILDING_ITERATION}"
+fi
+
+cwl_runner_version_orig=$cwl_runner_version
+
+if [[ "${cwl_runner_version}" != "${ARVADOS_BUILDING_VERSION}" ]]; then
+ cwl_runner_version="${cwl_runner_version}-4"
+else
+ cwl_runner_version="${ARVADOS_BUILDING_VERSION}-${ARVADOS_BUILDING_ITERATION}"
+fi
+
cd docker/jobs
docker build $NOCACHE \
- --build-arg python_sdk_version=${python_sdk_version}-2 \
- --build-arg cwl_runner_version=${cwl_runner_version}-4 \
- -t arvados/jobs:$cwl_runner_version .
+ --build-arg python_sdk_version=${python_sdk_version} \
+ --build-arg cwl_runner_version=${cwl_runner_version} \
+ -t arvados/jobs:$cwl_runner_version_orig .
ECODE=$?
FORCE=-f
fi
if ! [[ -z "$version_tag" ]]; then
- docker tag $FORCE arvados/jobs:$cwl_runner_version arvados/jobs:"$version_tag"
+ docker tag $FORCE arvados/jobs:$cwl_runner_version_orig arvados/jobs:"$version_tag"
else
- docker tag $FORCE arvados/jobs:$cwl_runner_version arvados/jobs:latest
+ docker tag $FORCE arvados/jobs:$cwl_runner_version_orig arvados/jobs:latest
fi
ECODE=$?
if ! [[ -z "$version_tag" ]]; then
docker_push arvados/jobs:"$version_tag"
else
- docker_push arvados/jobs:$cwl_runner_version
+ docker_push arvados/jobs:$cwl_runner_version_orig
docker_push arvados/jobs:latest
fi
title "upload arvados images finished (`timer`)"
TODO: extract this information based on git commit messages and generate changelogs / release notes automatically.
{% endcomment %}
+h3. v1.3.0 (2018-12-05)
+
+This release includes several database migrations, which will be executed automatically as part of the API server upgrade. On large Arvados installations, these migrations will take a while. We've seen the upgrade take 30 minutes or more on installations with a lot of collections.
+
+The @arvados-controller@ component now requires the /etc/arvados/config.yml file to be present. See <a href="{{ site.baseurl }}/install/install-controller.html#configuration">the @arvados-controller@ installation instructions</a>.
+
+Support for the deprecated "jobs" API is broken in this release. Users who rely on it should not upgrade. This will be fixed in an upcoming 1.3.1 patch release; however, users are "encouraged to migrate":upgrade-crunch2.html as support for the "jobs" API will be dropped in an upcoming release. Users who are already using the "containers" API are not affected.
+
h3. v1.2.1 (2018-11-26)
There are no special upgrade notes for this release.
</code></pre>
</notextile>
-h3. Configure arvados-controller
+h3(#configuration). Configure arvados-controller
Create the cluster configuration file @/etc/arvados/config.yml@ using the following template.
arv:WorkflowRunnerResources:
ramMin: 2048
coresMin: 2
+ keep_cache: 512
arv:ClusterTarget:
cluster_id: clsr1
project_uuid: clsr1-j7d0g-qxc4jcji7n4lafx
|_. Field |_. Type |_. Description |
|ramMin|int|RAM, in mebibytes, to reserve for the arvados-cwl-runner process. Default 1 GiB|
|coresMin|int|Number of cores to reserve to the arvados-cwl-runner process. Default 1 core.|
+|keep_cache|int|Size of collection metadata cache for the workflow runner, in MiB. Default 256 MiB. Will be added on to the RAM request when determining node size to request.|
h2(#clustertarget). arv:ClusterTarget
# apt.arvados.org
-deb http://apt.arvados.org/ jessie main
deb http://apt.arvados.org/ jessie-dev main
exit
end
-git_latest_tag = `git describe --abbrev=0`
+git_latest_tag = `git tag -l |sort -V -r |head -n1`
git_latest_tag = git_latest_tag.encode('utf-8').strip
git_timestamp, git_hash = `git log -n1 --first-parent --format=%ct:%H .`.chomp.split(":")
git_timestamp = Time.at(git_timestamp.to_i).utc
default=None,
metavar="CLUSTER_ID")
+ parser.add_argument("--collection-cache-size", type=int,
+ default=None,
+ help="Collection cache size (in MiB, default 256).")
+
parser.add_argument("--name", type=str,
help="Name to use for workflow execution instance.",
default=None)
help=argparse.SUPPRESS)
parser.add_argument("--thread-count", type=int,
- default=4, help="Number of threads to use for job submit and output collection.")
+ default=1, help="Number of threads to use for job submit and output collection.")
parser.add_argument("--http-timeout", type=int,
default=5*60, dest="http_timeout", help="API request timeout in seconds. Default is 300 seconds (5 minutes).")
type: int?
doc: Minimum cores allocated to cwl-runner
jsonldPredicate: "https://w3id.org/cwl/cwl#ResourceRequirement/coresMin"
+ keep_cache:
+ type: int?
+ doc: |
+ Size of collection metadata cache for the workflow runner, in
+ MiB. Default 256 MiB. Will be added on to the RAM request
+ when determining node size to request.
+ jsonldPredicate: "http://arvados.org/cwl#RuntimeConstraints/keep_cache"
- name: ClusterTarget
type: record
"secret_mounts": secret_mounts,
"runtime_constraints": {
"vcpus": math.ceil(self.submit_runner_cores),
- "ram": math.ceil(1024*1024 * self.submit_runner_ram),
+ "ram": 1024*1024 * (math.ceil(self.submit_runner_ram) + math.ceil(self.collection_cache_size)),
"API": True
},
"use_existing": self.enable_reuse,
"properties": {}
}
- if self.tool.tool.get("id", "").startswith("keep:"):
- sp = self.tool.tool["id"].split('/')
+ if self.embedded_tool.tool.get("id", "").startswith("keep:"):
+ sp = self.embedded_tool.tool["id"].split('/')
workflowcollection = sp[0][5:]
workflowname = "/".join(sp[1:])
workflowpath = "/var/lib/cwl/workflow/%s" % workflowname
"portable_data_hash": "%s" % workflowcollection
}
else:
- packed = packed_workflow(self.arvrunner, self.tool, self.merged_map)
+ packed = packed_workflow(self.arvrunner, self.embedded_tool, self.merged_map)
workflowpath = "/var/lib/cwl/workflow.json#main"
container_req["mounts"]["/var/lib/cwl/workflow.json"] = {
"kind": "json",
"content": packed
}
- if self.tool.tool.get("id", "").startswith("arvwf:"):
- container_req["properties"]["template_uuid"] = self.tool.tool["id"][6:33]
+ if self.embedded_tool.tool.get("id", "").startswith("arvwf:"):
+ container_req["properties"]["template_uuid"] = self.embedded_tool.tool["id"][6:33]
# --local means execute the workflow instead of submitting a container request
# --eval-timeout is the timeout for javascript invocation
# --parallel-task-count is the number of threads to use for job submission
# --enable/disable-reuse sets desired job reuse
+ # --collection-cache-size sets aside memory to store collections
command = ["arvados-cwl-runner",
"--local",
"--api=containers",
"--disable-validate",
"--eval-timeout=%s" % self.arvrunner.eval_timeout,
"--thread-count=%s" % self.arvrunner.thread_count,
- "--enable-reuse" if self.enable_reuse else "--disable-reuse"]
+ "--enable-reuse" if self.enable_reuse else "--disable-reuse",
+ "--collection-cache-size=%s" % self.collection_cache_size]
if self.output_name:
command.append("--output-name=" + self.output_name)
a pipeline template or pipeline instance.
"""
- if self.tool.tool["id"].startswith("keep:"):
- self.job_order["cwl:tool"] = self.tool.tool["id"][5:]
+ if self.embedded_tool.tool["id"].startswith("keep:"):
+ self.job_order["cwl:tool"] = self.embedded_tool.tool["id"][5:]
else:
- packed = packed_workflow(self.arvrunner, self.tool, self.merged_map)
+ packed = packed_workflow(self.arvrunner, self.embedded_tool, self.merged_map)
wf_pdh = upload_workflow_collection(self.arvrunner, self.name, packed)
self.job_order["cwl:tool"] = "%s/workflow.cwl#main" % wf_pdh
}
def __init__(self, runner, tool, job_order, enable_reuse, uuid,
- submit_runner_ram=0, name=None, merged_map=None):
+ submit_runner_ram=0, name=None, merged_map=None,
+ loadingContext=None):
self.runner = runner
- self.tool = tool
+ self.embedded_tool = tool
self.job = RunnerJob(
runner=runner,
tool=tool,
- job_order=job_order,
enable_reuse=enable_reuse,
output_name=None,
output_tags=None,
submit_runner_ram=submit_runner_ram,
name=name,
- merged_map=merged_map)
+ merged_map=merged_map,
+ loadingContext=loadingContext)
+ self.job.job_order = job_order
self.uuid = uuid
def pipeline_component_spec(self):
job_params = spec['script_parameters']
spec['script_parameters'] = {}
- for param in self.tool.tool['inputs']:
+ for param in self.embedded_tool.tool['inputs']:
param = copy.deepcopy(param)
# Data type and "required" flag...
#
# SPDX-License-Identifier: Apache-2.0
-from cwltool.command_line_tool import CommandLineTool
+from cwltool.command_line_tool import CommandLineTool, ExpressionTool
from cwltool.builder import Builder
from .arvjob import ArvadosJob
from .arvcontainer import ArvadosContainer
runtimeContext.tmpdir = "$(task.tmpdir)"
runtimeContext.docker_tmpdir = "$(task.tmpdir)"
return super(ArvadosCommandTool, self).job(joborder, output_callback, runtimeContext)
+
+class ArvadosExpressionTool(ExpressionTool):
+    # Arvados-aware wrapper around cwltool's ExpressionTool. It only
+    # stores a reference to the executor (arvrunner) so that output
+    # callbacks can be routed through it.
+    def __init__(self, arvrunner, toolpath_object, loadingContext):
+        super(ArvadosExpressionTool, self).__init__(toolpath_object, loadingContext)
+        self.arvrunner = arvrunner
+
+    def job(self,
+            job_order,              # type: Mapping[Text, Text]
+            output_callback,  # type: Callable[[Any, Any], Any]
+            runtimeContext          # type: RuntimeContext
+           ):
+        # Delegate to ExpressionTool.job unchanged, except the output
+        # callback is wrapped by the executor — presumably so results are
+        # recorded under the workflow eval lock; confirm in get_wrapped_callback.
+        return super(ArvadosExpressionTool, self).job(job_order, self.arvrunner.get_wrapped_callback(output_callback), runtimeContext)
def job(self, joborder, output_callback, runtimeContext):
- builder = self._init_job(joborder, runtimeContext)
+ builder = make_builder(joborder, self.hints, self.requirements, runtimeContext)
runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)
req, _ = self.get_requirement("http://arvados.org/cwl#RunInSingleContainer")
raise WorkflowException("Non-top-level ResourceRequirement in single container cannot have expressions")
if not dyn:
self.static_resource_req.append(req)
+ if req["class"] == "DockerRequirement":
+ if "http://arvados.org/cwl#dockerCollectionPDH" in req:
+ del req["http://arvados.org/cwl#dockerCollectionPDH"]
visit_class(packed["$graph"], ("Workflow", "CommandLineTool"), visit)
self.submit_runner_cluster = None
self.cluster_target_id = 0
self.always_submit_runner = False
+ self.collection_cache_size = 256
super(ArvRuntimeContext, self).__init__(kwargs)
arvargs.output_name = output_name
arvargs.output_tags = output_tags
arvargs.thread_count = 1
+ arvargs.collection_cache_size = None
runner = arvados_cwl.ArvCwlExecutor(api_client=arvados.safeapi.ThreadSafeApiCache(
api_params={"model": OrderedJsonModel()}, keep_params={"num_retries": 4}),
from .arvcontainer import RunnerContainer
from .arvjob import RunnerJob, RunnerTemplate
from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps
-from .arvtool import ArvadosCommandTool, validate_cluster_target
+from .arvtool import ArvadosCommandTool, validate_cluster_target, ArvadosExpressionTool
from .arvworkflow import ArvadosWorkflow, upload_workflow
-from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver, CollectionCache
+from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver, CollectionCache, pdh_size
from .perf import Perf
from .pathmapper import NoFollowPathMapper
from .task_queue import TaskQueue
from ._version import __version__
from cwltool.process import shortname, UnsupportedRequirement, use_custom_schema
-from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, get_listing
+from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, get_listing, visit_class
from cwltool.command_line_tool import compute_checksums
logger = logging.getLogger('arvados.cwl-runner')
arvargs.output_name = None
arvargs.output_tags = None
arvargs.thread_count = 1
+ arvargs.collection_cache_size = None
self.api = api_client
self.processes = {}
self.thread_count = arvargs.thread_count
self.poll_interval = 12
self.loadingContext = None
+ self.should_estimate_cache_size = True
if keep_client is not None:
self.keep_client = keep_client
else:
self.keep_client = arvados.keep.KeepClient(api_client=self.api, num_retries=self.num_retries)
- self.collection_cache = CollectionCache(self.api, self.keep_client, self.num_retries)
+ if arvargs.collection_cache_size:
+ collection_cache_size = arvargs.collection_cache_size*1024*1024
+ self.should_estimate_cache_size = False
+ else:
+ collection_cache_size = 256*1024*1024
+
+ self.collection_cache = CollectionCache(self.api, self.keep_client, self.num_retries,
+ cap=collection_cache_size)
self.fetcher_constructor = partial(CollectionFetcher,
api_client=self.api,
return ArvadosCommandTool(self, toolpath_object, loadingContext)
elif "class" in toolpath_object and toolpath_object["class"] == "Workflow":
return ArvadosWorkflow(self, toolpath_object, loadingContext)
+ elif "class" in toolpath_object and toolpath_object["class"] == "ExpressionTool":
+ return ArvadosExpressionTool(self, toolpath_object, loadingContext)
else:
- return cwltool.workflow.default_make_tool(toolpath_object, loadingContext)
+ raise Exception("Unknown tool %s" % toolpath_object.get("class"))
def output_callback(self, out, processStatus):
with self.workflow_eval_lock:
def start_run(self, runnable, runtimeContext):
- self.task_queue.add(partial(runnable.run, runtimeContext))
+ self.task_queue.add(partial(runnable.run, runtimeContext),
+ self.workflow_eval_lock, self.stop_polling)
def process_submitted(self, container):
with self.workflow_eval_lock:
with self.workflow_eval_lock:
j = self.processes[uuid]
logger.info("%s %s is %s", self.label(j), uuid, record["state"])
- self.task_queue.add(partial(j.done, record))
+ self.task_queue.add(partial(j.done, record),
+ self.workflow_eval_lock, self.stop_polling)
del self.processes[uuid]
def runtime_status_update(self, kind, message, detail=None):
uuid=existing_uuid,
submit_runner_ram=runtimeContext.submit_runner_ram,
name=runtimeContext.name,
- merged_map=merged_map)
+ merged_map=merged_map,
+ loadingContext=loadingContext)
tmpl.save()
# cwltool.main will write our return value to stdout.
return (tmpl.uuid, "success")
if runtimeContext.priority < 1 or runtimeContext.priority > 1000:
raise Exception("--priority must be in the range 1..1000.")
+ if self.should_estimate_cache_size:
+ visited = set()
+ estimated_size = [0]
+ def estimate_collection_cache(obj):
+ if obj.get("location", "").startswith("keep:"):
+ m = pdh_size.match(obj["location"][5:])
+ if m and m.group(1) not in visited:
+ visited.add(m.group(1))
+ estimated_size[0] += int(m.group(2))
+ visit_class(job_order, ("File", "Directory"), estimate_collection_cache)
+ runtimeContext.collection_cache_size = max(((estimated_size[0]*192) / (1024*1024))+1, 256)
+ self.collection_cache.set_cap(runtimeContext.collection_cache_size*1024*1024)
+
+ logger.info("Using collection cache size %s MiB", runtimeContext.collection_cache_size)
+
runnerjob = None
if runtimeContext.submit:
# Submit a runner job to run the workflow for us.
if self.work_api == "containers":
if tool.tool["class"] == "CommandLineTool" and runtimeContext.wait and (not runtimeContext.always_submit_runner):
runtimeContext.runnerjob = tool.tool["id"]
- runnerjob = tool.job(job_order,
- self.output_callback,
- runtimeContext).next()
else:
- runnerjob = RunnerContainer(self, tool, job_order, runtimeContext.enable_reuse,
+ tool = RunnerContainer(self, tool, loadingContext, runtimeContext.enable_reuse,
self.output_name,
self.output_tags,
submit_runner_ram=runtimeContext.submit_runner_ram,
intermediate_output_ttl=runtimeContext.intermediate_output_ttl,
merged_map=merged_map,
priority=runtimeContext.priority,
- secret_store=self.secret_store)
+ secret_store=self.secret_store,
+ collection_cache_size=runtimeContext.collection_cache_size,
+ collection_cache_is_default=self.should_estimate_cache_size)
elif self.work_api == "jobs":
- runnerjob = RunnerJob(self, tool, job_order, runtimeContext.enable_reuse,
+ tool = RunnerJob(self, tool, loadingContext, runtimeContext.enable_reuse,
self.output_name,
self.output_tags,
submit_runner_ram=runtimeContext.submit_runner_ram,
"state": "RunningOnClient"}).execute(num_retries=self.num_retries)
logger.info("Pipeline instance %s", self.pipeline["uuid"])
- if runnerjob and not runtimeContext.wait:
- submitargs = runtimeContext.copy()
- submitargs.submit = False
- runnerjob.run(submitargs)
+ if runtimeContext.cwl_runner_job is not None:
+ self.uuid = runtimeContext.cwl_runner_job.get('uuid')
+
+ jobiter = tool.job(job_order,
+ self.output_callback,
+ runtimeContext)
+
+ if runtimeContext.submit and not runtimeContext.wait:
+ runnerjob = jobiter.next()
+ runnerjob.run(runtimeContext)
return (runnerjob.uuid, "success")
current_container = arvados_cwl.util.get_current_container(self.api, self.num_retries, logger)
try:
self.workflow_eval_lock.acquire()
- if runnerjob:
- jobiter = iter((runnerjob,))
- else:
- if runtimeContext.cwl_runner_job is not None:
- self.uuid = runtimeContext.cwl_runner_job.get('uuid')
- jobiter = tool.job(job_order,
- self.output_callback,
- runtimeContext)
# Holds the lock while this code runs and releases it when
# it is safe to do so in self.workflow_eval_lock.wait(),
else:
logger.error("Workflow is deadlocked, no runnable processes and not waiting on any pending processes.")
break
+
+ if self.stop_polling.is_set():
+ break
+
loopperf.__enter__()
loopperf.__exit__()
if self.pipeline:
self.api.pipeline_instances().update(uuid=self.pipeline["uuid"],
body={"state": "Failed"}).execute(num_retries=self.num_retries)
- if runnerjob and runnerjob.uuid and self.work_api == "containers":
- self.api.container_requests().update(uuid=runnerjob.uuid,
+ if runtimeContext.submit and isinstance(tool, Runner):
+ runnerjob = tool
+ if runnerjob.uuid and self.work_api == "containers":
+ self.api.container_requests().update(uuid=runnerjob.uuid,
body={"priority": "0"}).execute(num_retries=self.num_retries)
finally:
self.workflow_eval_lock.release()
if self.final_output is None:
raise WorkflowException("Workflow did not return a result.")
- if runtimeContext.submit and isinstance(runnerjob, Runner):
- logger.info("Final output collection %s", runnerjob.final_output)
+ if runtimeContext.submit and isinstance(tool, Runner):
+ logger.info("Final output collection %s", tool.final_output)
else:
if self.output_name is None:
self.output_name = "Output of %s" % (shortname(tool.tool["id"]))
logger = logging.getLogger('arvados.cwl-runner')
+pdh_size = re.compile(r'([0-9a-f]{32})\+(\d+)(\+\S+)*')
+
class CollectionCache(object):
def __init__(self, api_client, keep_client, num_retries,
cap=256*1024*1024,
self.cap = cap
self.min_entries = min_entries
- def cap_cache(self):
- if self.total > self.cap:
- # ordered list iterates from oldest to newest
- for pdh, v in self.collections.items():
- if self.total < self.cap or len(self.collections) < self.min_entries:
- break
- # cut it loose
- logger.debug("Evicting collection reader %s from cache", pdh)
- del self.collections[pdh]
- self.total -= v[1]
+ def set_cap(self, cap):
+ self.cap = cap
+
+ def cap_cache(self, required):
+ # ordered dict iterates from oldest to newest
+ for pdh, v in self.collections.items():
+ available = self.cap - self.total
+ if available >= required or len(self.collections) < self.min_entries:
+ return
+ # cut it loose
+ logger.debug("Evicting collection reader %s from cache (cap %s total %s required %s)", pdh, self.cap, self.total, required)
+ del self.collections[pdh]
+ self.total -= v[1]
def get(self, pdh):
with self.lock:
if pdh not in self.collections:
+ m = pdh_size.match(pdh)
+ if m:
+ self.cap_cache(int(m.group(2)) * 128)
logger.debug("Creating collection reader for %s", pdh)
cr = arvados.collection.CollectionReader(pdh, api_client=self.api_client,
keep_client=self.keep_client,
sz = len(cr.manifest_text()) * 128
self.collections[pdh] = (cr, sz)
self.total += sz
- self.cap_cache()
else:
cr, sz = self.collections[pdh]
# bump it to the back
from cwltool.command_line_tool import CommandLineTool
import cwltool.workflow
-from cwltool.process import scandeps, UnsupportedRequirement, normalizeFilesDirs, shortname
+from cwltool.process import scandeps, UnsupportedRequirement, normalizeFilesDirs, shortname, Process
from cwltool.load_tool import fetch_document
from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, visit_class
from cwltool.utils import aslist
return collection.portable_data_hash()
-class Runner(object):
+class Runner(Process):
"""Base class for runner processes, which submit an instance of
arvados-cwl-runner and wait for the final result."""
- def __init__(self, runner, tool, job_order, enable_reuse,
+ def __init__(self, runner, tool, loadingContext, enable_reuse,
output_name, output_tags, submit_runner_ram=0,
name=None, on_error=None, submit_runner_image=None,
intermediate_output_ttl=0, merged_map=None,
- priority=None, secret_store=None):
+ priority=None, secret_store=None,
+ collection_cache_size=256,
+ collection_cache_is_default=True):
+
+ super(Runner, self).__init__(tool.tool, loadingContext)
+
self.arvrunner = runner
- self.tool = tool
- self.job_order = job_order
+ self.embedded_tool = tool
+ self.job_order = None
self.running = False
if enable_reuse:
# If reuse is permitted by command line arguments but
# disabled by the workflow itself, disable it.
- reuse_req, _ = self.tool.get_requirement("http://arvados.org/cwl#ReuseRequirement")
+ reuse_req, _ = self.embedded_tool.get_requirement("http://arvados.org/cwl#ReuseRequirement")
if reuse_req:
enable_reuse = reuse_req["enableReuse"]
self.enable_reuse = enable_reuse
self.submit_runner_cores = 1
self.submit_runner_ram = 1024 # defaut 1 GiB
+ self.collection_cache_size = collection_cache_size
- runner_resource_req, _ = self.tool.get_requirement("http://arvados.org/cwl#WorkflowRunnerResources")
+ runner_resource_req, _ = self.embedded_tool.get_requirement("http://arvados.org/cwl#WorkflowRunnerResources")
if runner_resource_req:
if runner_resource_req.get("coresMin"):
self.submit_runner_cores = runner_resource_req["coresMin"]
if runner_resource_req.get("ramMin"):
self.submit_runner_ram = runner_resource_req["ramMin"]
+ if runner_resource_req.get("keep_cache") and collection_cache_is_default:
+ self.collection_cache_size = runner_resource_req["keep_cache"]
if submit_runner_ram:
# Command line / initializer overrides default and/or spec from workflow
self.merged_map = merged_map or {}
+ def job(self,
+ job_order, # type: Mapping[Text, Text]
+ output_callbacks, # type: Callable[[Any, Any], Any]
+ runtimeContext # type: RuntimeContext
+ ): # type: (...) -> Generator[Any, None, None]
+ self.job_order = job_order
+ self._init_job(job_order, runtimeContext)
+ yield self
+
def update_pipeline_component(self, record):
pass
class TaskQueue(object):
def __init__(self, lock, thread_count):
self.thread_count = thread_count
- self.task_queue = Queue.Queue()
+ self.task_queue = Queue.Queue(maxsize=self.thread_count)
self.task_queue_threads = []
self.lock = lock
self.in_flight = 0
t.start()
def task_queue_func(self):
+ while True:
+ task = self.task_queue.get()
+ if task is None:
+ return
+ try:
+ task()
+ except Exception as e:
+ logger.exception("Unhandled exception running task")
+ self.error = e
- while True:
- task = self.task_queue.get()
- if task is None:
- return
- try:
- task()
- except Exception as e:
- logger.exception("Unhandled exception running task")
- self.error = e
-
- with self.lock:
- self.in_flight -= 1
-
- def add(self, task):
- with self.lock:
- if self.thread_count > 1:
+ with self.lock:
+ self.in_flight -= 1
+
+ def add(self, task, unlock, check_done):
+ if self.thread_count > 1:
+ with self.lock:
self.in_flight += 1
- self.task_queue.put(task)
- else:
- task()
+ else:
+ task()
+ return
+
+ while True:
+ try:
+ unlock.release()
+ if check_done.is_set():
+ return
+ self.task_queue.put(task, block=True, timeout=3)
+ return
+ except Queue.Full:
+ pass
+ finally:
+ unlock.acquire()
+
def drain(self):
try:
SETUP_DIR = os.path.dirname(__file__) or '.'
def git_latest_tag():
- gitinfo = subprocess.check_output(
- ['git', 'describe', '--abbrev=0']).strip()
- return str(gitinfo.decode('utf-8'))
+ gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+ gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+ return str(next(iter(gittags)).decode('utf-8'))
def choose_version_from():
sdk_ts = subprocess.check_output(
from source package), leave it alone.
"""
def git_latest_tag(self):
- gitinfo = subprocess.check_output(
- ['git', 'describe', '--abbrev=0']).strip()
- return str(gitinfo.decode('utf-8'))
+ gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+ gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+ return str(next(iter(gittags)).decode('utf-8'))
def git_timestamp_tag(self):
gitinfo = subprocess.check_output(
'schema-salad==2.7.20181116024232',
'typing >= 3.6.4',
'ruamel.yaml >=0.15.54, <=0.15.77',
- 'arvados-python-client>=1.1.4.20180607143841',
+ 'arvados-python-client>=1.2.1.20181130020805',
'setuptools',
'ciso8601 >=1.0.6, <2.0.0',
'subprocess32>=3.5.1',
--- /dev/null
+{
+ "x": {
+ "class": "File",
+ "path": "input/blorp.txt"
+ },
+ "y": {
+ "class": "Directory",
+ "location": "keep:99999999999999999999999999999998+99",
+ "listing": [{
+ "class": "File",
+ "location": "keep:99999999999999999999999999999998+99/file1.txt"
+ }]
+ }
+}
cache = CollectionCache(mock.MagicMock(), mock.MagicMock(), 4)
cr().manifest_text.return_value = 'x' * 524289
self.assertEqual(0, cache.total)
- c1 = cache.get("99999999999999999999999999999991+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+ c1 = cache.get("99999999999999999999999999999991+524289")
+ self.assertIn("99999999999999999999999999999991+524289", cache.collections)
+ self.assertNotIn("99999999999999999999999999999992+524289", cache.collections)
self.assertEqual((524289*128)*1, cache.total)
- c2 = cache.get("99999999999999999999999999999992+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertIn("99999999999999999999999999999992+99", cache.collections)
+ c2 = cache.get("99999999999999999999999999999992+524289")
+ self.assertIn("99999999999999999999999999999991+524289", cache.collections)
+ self.assertIn("99999999999999999999999999999992+524289", cache.collections)
self.assertEqual((524289*128)*2, cache.total)
- c1 = cache.get("99999999999999999999999999999991+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertIn("99999999999999999999999999999992+99", cache.collections)
+ c1 = cache.get("99999999999999999999999999999991+524289")
+ self.assertIn("99999999999999999999999999999991+524289", cache.collections)
+ self.assertIn("99999999999999999999999999999992+524289", cache.collections)
self.assertEqual((524289*128)*2, cache.total)
- c3 = cache.get("99999999999999999999999999999993+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertIn("99999999999999999999999999999992+99", cache.collections)
+ c3 = cache.get("99999999999999999999999999999993+524289")
+ self.assertIn("99999999999999999999999999999991+524289", cache.collections)
+ self.assertIn("99999999999999999999999999999992+524289", cache.collections)
self.assertEqual((524289*128)*3, cache.total)
- c4 = cache.get("99999999999999999999999999999994+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+ c4 = cache.get("99999999999999999999999999999994+524289")
+ self.assertIn("99999999999999999999999999999991+524289", cache.collections)
+ self.assertNotIn("99999999999999999999999999999992+524289", cache.collections)
self.assertEqual((524289*128)*3, cache.total)
- c5 = cache.get("99999999999999999999999999999995+99")
- self.assertNotIn("99999999999999999999999999999991+99", cache.collections)
- self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+ c5 = cache.get("99999999999999999999999999999995+524289")
+ self.assertNotIn("99999999999999999999999999999991+524289", cache.collections)
+ self.assertNotIn("99999999999999999999999999999992+524289", cache.collections)
self.assertEqual((524289*128)*3, cache.total)
cache = CollectionCache(mock.MagicMock(), mock.MagicMock(), 4)
cr().manifest_text.return_value = 'x' * 524287
self.assertEqual(0, cache.total)
- c1 = cache.get("99999999999999999999999999999991+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+ c1 = cache.get("99999999999999999999999999999991+524287")
+ self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+ self.assertNotIn("99999999999999999999999999999992+524287", cache.collections)
self.assertEqual((524287*128)*1, cache.total)
- c2 = cache.get("99999999999999999999999999999992+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertIn("99999999999999999999999999999992+99", cache.collections)
+ c2 = cache.get("99999999999999999999999999999992+524287")
+ self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+ self.assertIn("99999999999999999999999999999992+524287", cache.collections)
self.assertEqual((524287*128)*2, cache.total)
- c1 = cache.get("99999999999999999999999999999991+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertIn("99999999999999999999999999999992+99", cache.collections)
+ c1 = cache.get("99999999999999999999999999999991+524287")
+ self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+ self.assertIn("99999999999999999999999999999992+524287", cache.collections)
self.assertEqual((524287*128)*2, cache.total)
- c3 = cache.get("99999999999999999999999999999993+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertIn("99999999999999999999999999999992+99", cache.collections)
+ c3 = cache.get("99999999999999999999999999999993+524287")
+ self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+ self.assertIn("99999999999999999999999999999992+524287", cache.collections)
self.assertEqual((524287*128)*3, cache.total)
- c4 = cache.get("99999999999999999999999999999994+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertIn("99999999999999999999999999999992+99", cache.collections)
+ c4 = cache.get("99999999999999999999999999999994+524287")
+ self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+ self.assertIn("99999999999999999999999999999992+524287", cache.collections)
self.assertEqual((524287*128)*4, cache.total)
- c5 = cache.get("99999999999999999999999999999995+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+ c5 = cache.get("99999999999999999999999999999995+524287")
+ self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+ self.assertNotIn("99999999999999999999999999999992+524287", cache.collections)
self.assertEqual((524287*128)*4, cache.total)
- c6 = cache.get("99999999999999999999999999999996+99")
- self.assertNotIn("99999999999999999999999999999991+99", cache.collections)
- self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+ c6 = cache.get("99999999999999999999999999999996+524287")
+ self.assertNotIn("99999999999999999999999999999991+524287", cache.collections)
+ self.assertNotIn("99999999999999999999999999999992+524287", cache.collections)
self.assertEqual((524287*128)*4, cache.total)
'state': 'Committed',
'command': ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse', '--debug', '--on-error=continue',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json'],
'name': 'submit_wf.cwl',
'container_image': '999999999999999999999999999999d3+99',
'runtime_constraints': {
'API': True,
'vcpus': 1,
- 'ram': 1024*1024*1024
+ 'ram': (1024+256)*1024*1024
},
'use_existing': True,
'properties': {},
expect_container["command"] = [
'arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--disable-reuse', '--debug', '--on-error=continue',
+ '--eval-timeout=20', '--thread-count=1',
+ '--disable-reuse', "--collection-cache-size=256",
+ '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container["use_existing"] = False
expect_container["command"] = [
'arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--disable-reuse', '--debug', '--on-error=continue',
+ '--eval-timeout=20', '--thread-count=1',
+ '--disable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container["use_existing"] = False
expect_container["name"] = "submit_wf_no_reuse.cwl"
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse', '--debug', '--on-error=stop',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256",
+ '--debug', '--on-error=stop',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.api.container_requests().create.assert_called_with(
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256",
"--output-name="+output_name, '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container["output_name"] = output_name
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse', "--debug",
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256", "--debug",
"--storage-classes=foo", '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse', '--debug', '--on-error=continue',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256", '--debug',
+ '--on-error=continue',
"--intermediate-output-ttl=3600",
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse', '--debug', '--on-error=continue',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256",
+ '--debug', '--on-error=continue',
"--trash-intermediate",
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256",
"--output-tags="+output_tags, '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
logging.exception("")
expect_container = copy.deepcopy(stubs.expect_container_spec)
- expect_container["runtime_constraints"]["ram"] = 2048*1024*1024
+ expect_container["runtime_constraints"]["ram"] = (2048+256)*1024*1024
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
'container_image': '999999999999999999999999999999d3+99',
'command': ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse', '--debug', '--on-error=continue',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
'/var/lib/cwl/workflow/expect_arvworkflow.cwl#main', '/var/lib/cwl/cwl.input.json'],
'cwd': '/var/spool/cwl',
'runtime_constraints': {
'API': True,
'vcpus': 1,
- 'ram': 1073741824
+ 'ram': 1342177280
},
'use_existing': True,
'properties': {},
'container_image': "999999999999999999999999999999d3+99",
'command': ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse', '--debug', '--on-error=continue',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json'],
'cwd': '/var/spool/cwl',
'runtime_constraints': {
'API': True,
'vcpus': 1,
- 'ram': 1073741824
+ 'ram': 1342177280
},
'use_existing': True,
'properties': {
stubs.expect_container_request_uuid + '\n')
+ @stubs
+ def test_submit_missing_input(self, stubs):
+ capture_stdout = cStringIO.StringIO()
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+ self.assertEqual(exited, 0)
+
+ capture_stdout = cStringIO.StringIO()
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job_missing.json"],
+ capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+ self.assertEqual(exited, 1)
+
+
@stubs
def test_submit_container_project(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
expect_container["owner_uuid"] = project_uuid
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- "--eval-timeout=20", "--thread-count=4",
- '--enable-reuse', '--debug', '--on-error=continue',
+ "--eval-timeout=20", "--thread-count=1",
+ '--enable-reuse', "--collection-cache-size=256", '--debug',
+ '--on-error=continue',
'--project-uuid='+project_uuid,
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=60.0', '--thread-count=4',
- '--enable-reuse', '--debug', '--on-error=continue',
+ '--eval-timeout=60.0', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256",
+ '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.api.container_requests().create.assert_called_with(
self.assertEqual(capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
+ @stubs
+ def test_submit_container_collection_cache(self, stubs):
+ project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
+ capture_stdout = cStringIO.StringIO()
+ try:
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--collection-cache-size=500",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+ self.assertEqual(exited, 0)
+ except Exception:
+ logging.exception("")
+
+ expect_container = copy.deepcopy(stubs.expect_container_spec)
+ expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
+ '--no-log-timestamps', '--disable-validate',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=500",
+ '--debug', '--on-error=continue',
+ '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
+ expect_container["runtime_constraints"]["ram"] = (1024+500)*1024*1024
+
+ stubs.api.container_requests().create.assert_called_with(
+ body=JsonDiffMatcher(expect_container))
+ self.assertEqual(capture_stdout.getvalue(),
+ stubs.expect_container_request_uuid + '\n')
+
@stubs
def test_submit_container_thread_count(self, stubs):
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
'--eval-timeout=20', '--thread-count=20',
- '--enable-reuse', '--debug', '--on-error=continue',
+ '--enable-reuse', "--collection-cache-size=256",
+ '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.api.container_requests().create.assert_called_with(
expect_container["runtime_constraints"] = {
"API": True,
"vcpus": 2,
- "ram": 2000 * 2**20
+ "ram": (2000+512) * 2**20
}
expect_container["name"] = "submit_wf_runner_resources.cwl"
expect_container["mounts"]["/var/lib/cwl/workflow.json"]["content"]["$graph"][1]["hints"] = [
{
"class": "http://arvados.org/cwl#WorkflowRunnerResources",
"coresMin": 2,
- "ramMin": 2000
+ "ramMin": 2000,
+ "keep_cache": 512
}
]
expect_container["mounts"]["/var/lib/cwl/workflow.json"]["content"]["$graph"][0]["$namespaces"] = {
"arv": "http://arvados.org/cwl#",
}
+ expect_container['command'] = ['arvados-cwl-runner', '--local', '--api=containers',
+ '--no-log-timestamps', '--disable-validate',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=512", '--debug', '--on-error=continue',
+ '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
"--no-log-timestamps",
"--disable-validate",
"--eval-timeout=20",
- '--thread-count=4',
+ '--thread-count=1',
"--enable-reuse",
+ "--collection-cache-size=256",
'--debug',
"--on-error=continue",
"/var/lib/cwl/workflow.json#main",
"properties": {},
"runtime_constraints": {
"API": True,
- "ram": 1073741824,
+ "ram": 1342177280,
"vcpus": 1
},
"secret_mounts": {
class TestTaskQueue(unittest.TestCase):
def test_tq(self):
tq = TaskQueue(threading.Lock(), 2)
+ try:
+ self.assertIsNone(tq.error)
- self.assertIsNone(tq.error)
-
- tq.add(success_task)
- tq.add(success_task)
- tq.add(success_task)
- tq.add(success_task)
+ unlock = threading.Lock()
+ unlock.acquire()
+ check_done = threading.Event()
- tq.join()
+ tq.add(success_task, unlock, check_done)
+ tq.add(success_task, unlock, check_done)
+ tq.add(success_task, unlock, check_done)
+ tq.add(success_task, unlock, check_done)
+ finally:
+ tq.join()
self.assertIsNone(tq.error)
def test_tq_error(self):
tq = TaskQueue(threading.Lock(), 2)
-
- self.assertIsNone(tq.error)
-
- tq.add(success_task)
- tq.add(success_task)
- tq.add(fail_task)
- tq.add(success_task)
-
- tq.join()
+ try:
+ self.assertIsNone(tq.error)
+
+ unlock = threading.Lock()
+ unlock.acquire()
+ check_done = threading.Event()
+
+ tq.add(success_task, unlock, check_done)
+ tq.add(success_task, unlock, check_done)
+ tq.add(fail_task, unlock, check_done)
+ tq.add(success_task, unlock, check_done)
+ finally:
+ tq.join()
self.assertIsNotNone(tq.error)
arv:WorkflowRunnerResources:
ramMin: 2000
coresMin: 2
+ keep_cache: 512
inputs:
- id: x
type: File
import re
def git_latest_tag():
- gitinfo = subprocess.check_output(
- ['git', 'describe', '--abbrev=0']).strip()
- return str(gitinfo.decode('utf-8'))
+ gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+ gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')], reverse=True)
+ return str(next(iter(gittags)).decode('utf-8'))
def git_timestamp_tag():
gitinfo = subprocess.check_output(
import re
def git_latest_tag():
- gitinfo = subprocess.check_output(
- ['git', 'describe', '--abbrev=0']).strip()
- return str(gitinfo.decode('utf-8'))
+ gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+ gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')], reverse=True)
+ return str(next(iter(gittags)).decode('utf-8'))
def git_timestamp_tag():
gitinfo = subprocess.check_output(
from source package), leave it alone.
"""
def git_latest_tag(self):
- gitinfo = subprocess.check_output(
- ['git', 'describe', '--abbrev=0']).strip()
- return str(gitinfo.decode('utf-8'))
+ gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+ gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')], reverse=True)
+ return str(next(iter(gittags)).decode('utf-8'))
def git_timestamp_tag(self):
gitinfo = subprocess.check_output(
exit
end
-git_latest_tag = `git describe --abbrev=0`
+git_latest_tag = `git tag -l |sort -V -r |head -n1`
git_latest_tag = git_latest_tag.encode('utf-8').strip
git_timestamp, git_hash = `git log -n1 --first-parent --format=%ct:%H .`.chomp.split(":")
git_timestamp = Time.at(git_timestamp.to_i).utc
if !include_trash
if sql_table != "api_client_authorizations"
# Only include records where the owner is not trashed
- sql_conds = "NOT EXISTS(SELECT 1 FROM #{PERMISSION_VIEW} "+
- "WHERE trashed = 1 AND "+
- "(#{sql_table}.owner_uuid = target_uuid)) #{exclude_trashed_records}"
+ sql_conds = "#{sql_table}.owner_uuid NOT IN (SELECT target_uuid FROM #{PERMISSION_VIEW} "+
+ "WHERE trashed = 1) #{exclude_trashed_records}"
end
end
else
# see issue 13208 for details.
# Match a direct read permission link from the user to the record uuid
- direct_check = "EXISTS(SELECT 1 FROM #{PERMISSION_VIEW} "+
- "WHERE user_uuid IN (:user_uuids) AND perm_level >= 1 #{trashed_check} AND target_uuid = #{sql_table}.uuid)"
+ direct_check = "#{sql_table}.uuid IN (SELECT target_uuid FROM #{PERMISSION_VIEW} "+
+ "WHERE user_uuid IN (:user_uuids) AND perm_level >= 1 #{trashed_check})"
# Match a read permission link from the user to the record's owner_uuid
owner_check = ""
if sql_table != "api_client_authorizations" and sql_table != "groups" then
- owner_check = "OR EXISTS(SELECT 1 FROM #{PERMISSION_VIEW} "+
- "WHERE user_uuid IN (:user_uuids) AND perm_level >= 1 #{trashed_check} AND target_uuid = #{sql_table}.owner_uuid AND target_owner_uuid IS NOT NULL) "
+ owner_check = "OR #{sql_table}.owner_uuid IN (SELECT target_uuid FROM #{PERMISSION_VIEW} "+
+ "WHERE user_uuid IN (:user_uuids) AND perm_level >= 1 #{trashed_check} AND target_owner_uuid IS NOT NULL) "
end
links_cond = ""
cast = serialized_attributes[column] ? '::text' : ''
"coalesce(#{column}#{cast},'')"
end
- "to_tsvector('english', #{parts.join(" || ' ' || ")})"
+ "to_tsvector('english', substr(#{parts.join(" || ' ' || ")}, 0, 8000))"
end
def self.apply_filters query, filters
candidates = candidates.where_serialized(:runtime_constraints, resolve_runtime_constraints(attrs[:runtime_constraints]), md5: true)
log_reuse_info(candidates) { "after filtering on runtime_constraints #{attrs[:runtime_constraints].inspect}" }
- candidates = candidates.where('runtime_user_uuid = ? or (runtime_user_uuid is NULL and runtime_auth_scopes is NULL)',
- attrs[:runtime_user_uuid])
- log_reuse_info(candidates) { "after filtering on runtime_user_uuid #{attrs[:runtime_user_uuid].inspect}" }
-
- candidates = candidates.where('runtime_auth_scopes = ? or (runtime_user_uuid is NULL and runtime_auth_scopes is NULL)',
- SafeJSON.dump(attrs[:runtime_auth_scopes].sort))
- log_reuse_info(candidates) { "after filtering on runtime_auth_scopes #{attrs[:runtime_auth_scopes].inspect}" }
-
log_reuse_info { "checking for state=Complete with readable output and log..." }
select_readable_pdh = Collection.
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+require './db/migrate/20161213172944_full_text_search_indexes'
+
+class ReplaceFullTextIndexes < ActiveRecord::Migration
+ def up
+ FullTextSearchIndexes.new.up
+ end
+
+ def down
+ end
+end
-- Name: collections_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX collections_full_text_search_idx ON public.collections USING gin (to_tsvector('english'::regconfig, (((((((((((((((((COALESCE(owner_uuid, ''::character varying))::text || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(portable_data_hash, ''::character varying))::text) || ' '::text) || (COALESCE(uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || COALESCE((properties)::text, ''::text)) || ' '::text) || COALESCE(file_names, (''::character varying)::text))));
+CREATE INDEX collections_full_text_search_idx ON public.collections USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((((COALESCE(owner_uuid, ''::character varying))::text || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(portable_data_hash, ''::character varying))::text) || ' '::text) || (COALESCE(uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || COALESCE((properties)::text, ''::text)) || ' '::text) || COALESCE(file_names, ''::text)), 0, 1000000)));
--
-- Name: container_requests_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX container_requests_full_text_search_idx ON public.container_requests USING gin (to_tsvector('english'::regconfig, (((((((((((((((((((((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(description, ''::text)) || ' '::text) || COALESCE((properties)::text, ''::text)) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || (COALESCE(requesting_container_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(container_uuid, ''::character varying))::text) || ' '::text) || COALESCE(runtime_constraints, ''::text)) || ' '::text) || (COALESCE(container_image, ''::character varying))::text) || ' '::text) || COALESCE(environment, ''::text)) || ' '::text) || (COALESCE(cwd, ''::character varying))::text) || ' '::text) || COALESCE(command, ''::text)) || ' '::text) || (COALESCE(output_path, ''::character varying))::text) || ' '::text) || COALESCE(filters, ''::text)) || ' '::text) || COALESCE(scheduling_parameters, ''::text)) || ' '::text) || (COALESCE(output_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(log_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(output_name, ''::character varying))::text)));
+CREATE INDEX container_requests_full_text_search_idx ON public.container_requests USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((((((((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(description, ''::text)) || ' '::text) || COALESCE((properties)::text, ''::text)) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || (COALESCE(requesting_container_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(container_uuid, ''::character varying))::text) || ' '::text) || COALESCE(runtime_constraints, ''::text)) || ' '::text) || (COALESCE(container_image, ''::character varying))::text) || ' '::text) || COALESCE(environment, ''::text)) || ' '::text) || (COALESCE(cwd, ''::character varying))::text) || ' '::text) || COALESCE(command, ''::text)) || ' '::text) || (COALESCE(output_path, ''::character varying))::text) || ' '::text) || COALESCE(filters, ''::text)) || ' '::text) || COALESCE(scheduling_parameters, ''::text)) || ' '::text) || (COALESCE(output_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(log_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(output_name, ''::character varying))::text), 0, 1000000)));
--
-- Name: groups_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX groups_full_text_search_idx ON public.groups USING gin (to_tsvector('english'::regconfig, (((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || (COALESCE(group_class, ''::character varying))::text) || ' '::text) || COALESCE((properties)::text, ''::text))));
+CREATE INDEX groups_full_text_search_idx ON public.groups USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || (COALESCE(group_class, ''::character varying))::text) || ' '::text) || COALESCE((properties)::text, ''::text)), 0, 1000000)));
--
-- Name: jobs_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-- Full-text search expression indexes.  The concatenated column text is
-- passed through substr() so that rows with very large text columns
-- (e.g. jobs with huge script_parameters) cannot exceed PostgreSQL's
-- maximum GIN index entry size.
-- NOTE(review): substr() start position 0 is "one before the first
-- character" in PostgreSQL, so the effective cap is 999999 characters;
-- kept as-is to match the output generated by the migration.
CREATE INDEX jobs_full_text_search_idx ON public.jobs USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((((((((((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(submit_id, ''::character varying))::text) || ' '::text) || (COALESCE(script, ''::character varying))::text) || ' '::text) || (COALESCE(script_version, ''::character varying))::text) || ' '::text) || COALESCE(script_parameters, ''::text)) || ' '::text) || (COALESCE(cancelled_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(cancelled_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(output, ''::character varying))::text) || ' '::text) || (COALESCE(is_locked_by_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(log, ''::character varying))::text) || ' '::text) || COALESCE(tasks_summary, ''::text)) || ' '::text) || COALESCE(runtime_constraints, ''::text)) || ' '::text) || (COALESCE(repository, ''::character varying))::text) || ' '::text) || (COALESCE(supplied_script_version, ''::character varying))::text) || ' '::text) || (COALESCE(docker_image_locator, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || (COALESCE(arvados_sdk_version, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)), 0, 1000000)));


--
-- Name: pipeline_instances_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--

CREATE INDEX pipeline_instances_full_text_search_idx ON public.pipeline_instances USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(pipeline_template_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)) || ' '::text) || COALESCE(properties, ''::text)) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || COALESCE(components_summary, ''::text)) || ' '::text) || (COALESCE(description, ''::character varying))::text), 0, 1000000)));


--
-- Name: pipeline_templates_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--

CREATE INDEX pipeline_templates_full_text_search_idx ON public.pipeline_templates USING gin (to_tsvector('english'::regconfig, substr((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)) || ' '::text) || (COALESCE(description, ''::character varying))::text), 0, 1000000)));


--
-- Name: workflows_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--

CREATE INDEX workflows_full_text_search_idx ON public.workflows USING gin (to_tsvector('english'::regconfig, substr((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(description, ''::text)), 0, 1000000)));
--
-- Migration version bookkeeping; '20180917200000' adds the substr()
-- truncation to the full-text search indexes above.
INSERT INTO schema_migrations (version) VALUES ('20180915155335');

INSERT INTO schema_migrations (version) VALUES ('20180917200000');

INSERT INTO schema_migrations (version) VALUES ('20180917205609');

INSERT INTO schema_migrations (version) VALUES ('20180919001158');
c1, _ = minimal_new(common_attrs.merge({runtime_token: api_client_authorizations(:active).token}))
assert_equal Container::Queued, c1.state
reused = Container.find_reusable(common_attrs.merge(runtime_token_attr(:container_runtime_token)))
- assert_nil reused
+ # See #14584
+ assert_equal c1.uuid, reused.uuid
end
test "find_reusable method with nil runtime_token, then runtime_token with different user" do
  c1, _ = minimal_new(common_attrs.merge({runtime_token: nil}))
  assert_equal Container::Queued, c1.state
  reused = Container.find_reusable(common_attrs.merge(runtime_token_attr(:container_runtime_token)))
  # The runtime_token only affects how the container fetches inputs and
  # writes outputs, not what it computes, so a request carrying a
  # different (or no) runtime token may still reuse c1.  See #14584.
  assert_equal c1.uuid, reused.uuid
end
test "find_reusable method with different runtime_token, different scope, same user" do
  c1, _ = minimal_new(common_attrs.merge({runtime_token: api_client_authorizations(:runtime_token_limited_scope).token}))
  assert_equal Container::Queued, c1.state
  reused = Container.find_reusable(common_attrs.merge(runtime_token_attr(:container_runtime_token)))
  # Token scope does not change the container's computation, only its
  # data-access credentials, so reuse is still permitted.  See #14584.
  assert_equal c1.uuid, reused.uuid
end
test "Container running" do
import re
def git_latest_tag():
    """Return the highest version tag in this repo, as a str.

    Lists every tag and compares them component-wise as integers (so
    "1.10.0" sorts above "1.9.9"), instead of `git describe --abbrev=0`,
    which only reports the nearest tag reachable from HEAD.
    """
    gittags = subprocess.check_output(['git', 'tag', '-l']).split()
    # Keep only tags made of dotted integers; a stray non-release tag
    # such as b"v1.0-rc1" would otherwise make int() raise ValueError.
    gittags = [t for t in gittags if re.match(br'\d+(\.\d+)*$', t)]
    gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')], reverse=True)
    return str(gittags[0].decode('utf-8'))
def git_timestamp_tag():
gitinfo = subprocess.check_output(
import re
def git_latest_tag():
    """Return the highest version tag in this repo, as a str.

    Lists every tag and compares them component-wise as integers (so
    "1.10.0" sorts above "1.9.9"), instead of `git describe --abbrev=0`,
    which only reports the nearest tag reachable from HEAD.
    """
    gittags = subprocess.check_output(['git', 'tag', '-l']).split()
    # Keep only tags made of dotted integers; a stray non-release tag
    # such as b"v1.0-rc1" would otherwise make int() raise ValueError.
    gittags = [t for t in gittags if re.match(br'\d+(\.\d+)*$', t)]
    gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')], reverse=True)
    return str(gittags[0].decode('utf-8'))
def git_timestamp_tag():
gitinfo = subprocess.check_output(
exit
end
# Highest version tag in the repo (`sort -V` compares dotted version
# components numerically), rather than `git describe --abbrev=0`, which
# only reports the nearest tag reachable from the current branch.
git_latest_tag = `git tag -l |sort -V -r |head -n1`
git_latest_tag = git_latest_tag.encode('utf-8').strip
# Timestamp and hash of the newest commit touching the current directory.
git_timestamp, git_hash = `git log -n1 --first-parent --format=%ct:%H .`.chomp.split(":")
git_timestamp = Time.at(git_timestamp.to_i).utc
import re
def git_latest_tag():
    """Return the highest version tag in this repo, as a str.

    Lists every tag and compares them component-wise as integers (so
    "1.10.0" sorts above "1.9.9"), instead of `git describe --abbrev=0`,
    which only reports the nearest tag reachable from HEAD.
    """
    gittags = subprocess.check_output(['git', 'tag', '-l']).split()
    # Keep only tags made of dotted integers; a stray non-release tag
    # such as b"v1.0-rc1" would otherwise make int() raise ValueError.
    gittags = [t for t in gittags if re.match(br'\d+(\.\d+)*$', t)]
    gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')], reverse=True)
    return str(gittags[0].decode('utf-8'))
def git_timestamp_tag():
gitinfo = subprocess.check_output(
import re
def git_latest_tag():
    """Return the highest version tag in this repo, as a str.

    Lists every tag and compares them component-wise as integers (so
    "1.10.0" sorts above "1.9.9"), instead of `git describe --abbrev=0`,
    which only reports the nearest tag reachable from HEAD.
    """
    gittags = subprocess.check_output(['git', 'tag', '-l']).split()
    # Keep only tags made of dotted integers; a stray non-release tag
    # such as b"v1.0-rc1" would otherwise make int() raise ValueError.
    gittags = [t for t in gittags if re.match(br'\d+(\.\d+)*$', t)]
    gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')], reverse=True)
    return str(gittags[0].decode('utf-8'))
def git_timestamp_tag():
gitinfo = subprocess.check_output(