sdk/R/.Rbuildignore
sdk/R/ArvadosR.Rproj
*.Rd
+*.asc
attrs['state'] = "Uncommitted"
# required
- attrs['command'] = ["arvados-cwl-runner",
- "--local",
- "--api=containers",
- "--project-uuid=#{params['work_unit']['owner_uuid']}",
- "/var/lib/cwl/workflow.json#main",
- "/var/lib/cwl/cwl.input.json"]
attrs['container_image'] = "arvados/jobs"
attrs['cwd'] = "/var/spool/cwl"
attrs['output_path'] = "/var/spool/cwl"
"API" => true
}
+ keep_cache = 256
input_defaults = {}
if wf_json
main = get_cwl_main(wf_json)
if hint[:ramMin]
runtime_constraints["ram"] = hint[:ramMin] * 1024 * 1024
end
+ if hint[:keep_cache]
+ keep_cache = hint[:keep_cache]
+ end
end
end
end
end
+ attrs['command'] = ["arvados-cwl-runner",
+ "--local",
+ "--api=containers",
+ "--project-uuid=#{params['work_unit']['owner_uuid']}",
+ "--collection-keep-cache=#{keep_cache}",
+ "/var/lib/cwl/workflow.json#main",
+ "/var/lib/cwl/cwl.input.json"]
+
# mounts
mounts = {
"/var/lib/cwl/cwl.input.json" => {
#
# SPDX-License-Identifier: AGPL-3.0
+set -e
+
+arvados-cwl-runner --version
+
exec python <<EOF
import arvados_cwl
print "arvados-cwl-runner version", arvados_cwl.__version__
# clean up the docker build environment
cd "$WORKSPACE"
+if [[ -z "$ARVADOS_BUILDING_VERSION" ]] && ! [[ -z "$version_tag" ]]; then
+ ARVADOS_BUILDING_VERSION="$version_tag"
+ ARVADOS_BUILDING_ITERATION="1"
+fi
+
python_sdk_ts=$(cd sdk/python && timestamp_from_git)
cwl_runner_ts=$(cd sdk/cwl && timestamp_from_git)
echo cwl_runner_version $cwl_runner_version python_sdk_version $python_sdk_version
+if [[ "${python_sdk_version}" != "${ARVADOS_BUILDING_VERSION}" ]]; then
+ python_sdk_version="${python_sdk_version}-2"
+else
+ python_sdk_version="${ARVADOS_BUILDING_VERSION}-${ARVADOS_BUILDING_ITERATION}"
+fi
+
+cwl_runner_version_orig=$cwl_runner_version
+
+if [[ "${cwl_runner_version}" != "${ARVADOS_BUILDING_VERSION}" ]]; then
+ cwl_runner_version="${cwl_runner_version}-4"
+else
+ cwl_runner_version="${ARVADOS_BUILDING_VERSION}-${ARVADOS_BUILDING_ITERATION}"
+fi
+
cd docker/jobs
docker build $NOCACHE \
- --build-arg python_sdk_version=${python_sdk_version}-2 \
- --build-arg cwl_runner_version=${cwl_runner_version}-4 \
- -t arvados/jobs:$cwl_runner_version .
+ --build-arg python_sdk_version=${python_sdk_version} \
+ --build-arg cwl_runner_version=${cwl_runner_version} \
+ -t arvados/jobs:$cwl_runner_version_orig .
ECODE=$?
FORCE=-f
fi
if ! [[ -z "$version_tag" ]]; then
- docker tag $FORCE arvados/jobs:$cwl_runner_version arvados/jobs:"$version_tag"
+ docker tag $FORCE arvados/jobs:$cwl_runner_version_orig arvados/jobs:"$version_tag"
else
- docker tag $FORCE arvados/jobs:$cwl_runner_version arvados/jobs:latest
+ docker tag $FORCE arvados/jobs:$cwl_runner_version_orig arvados/jobs:latest
fi
ECODE=$?
if ! [[ -z "$version_tag" ]]; then
docker_push arvados/jobs:"$version_tag"
else
- docker_push arvados/jobs:$cwl_runner_version
+ docker_push arvados/jobs:$cwl_runner_version_orig
docker_push arvados/jobs:latest
fi
title "upload arvados images finished (`timer`)"
package_go_binary tools/keep-exercise keep-exercise \
"Performance testing tool for Arvados Keep"
+
+# We need explicit debian_revision values in the ruamel.yaml dependencies because this package is built with an
+# iteration greater than zero. So we parse setup.py, extract the ruamel.yaml dependency constraints, tell fpm not
+# to include them in the package automatically, and re-add them manually with an appropriate debian_revision
+# value. See #14552 for the reason for this (nasty) workaround. ${ruamel_depends[@]} is used in a few places
+# further down in this script.
+# Ward, 2018-11-28
+IFS=', ' read -r -a deps <<< "$(grep ruamel.yaml "$WORKSPACE/sdk/python/setup.py" | cut -f 3 -dl | sed -e "s/'//g")"
+declare -a ruamel_depends=()
+for i in "${deps[@]}"; do
+  # Insert a space before the first digit so the constraint reads e.g. ">= 0.15.54".
+  i=$(echo "$i" | sed -e 's!\([0-9]\)! \1!')
+  if [[ $i =~ \> ]]; then
+    # Lower bound: require at least debian_revision 1 of that upstream version.
+    ruamel_depends+=(--depends "python-ruamel.yaml $i-1")
+  elif [[ $i =~ \< ]]; then
+    # Upper bound: allow up to debian_revision 9 of that upstream version.
+    ruamel_depends+=(--depends "python-ruamel.yaml $i-9")
+  else
+    echo "Encountered ruamel dependency that I can't parse. Aborting..." >&2
+    exit 1
+  fi
+done
+
+
# The Python SDK
# Please resist the temptation to add --no-python-fix-name to the fpm call here
# (which would remove the python- prefix from the package name), because this
arvados_python_client_version=${ARVADOS_BUILDING_VERSION:-$(awk '($1 == "Version:"){print $2}' $WORKSPACE/sdk/python/arvados_python_client.egg-info/PKG-INFO)}
test_package_presence ${PYTHON2_PKG_PREFIX}-arvados-python-client "$arvados_python_client_version" python
if [[ "$?" == "0" ]]; then
- fpm_build $WORKSPACE/sdk/python "${PYTHON2_PKG_PREFIX}-arvados-python-client" 'Curoverse, Inc.' 'python' "$arvados_python_client_version" "--url=https://arvados.org" "--description=The Arvados Python SDK" --depends "${PYTHON2_PKG_PREFIX}-setuptools" --deb-recommends=git
+
+ fpm_build $WORKSPACE/sdk/python "${PYTHON2_PKG_PREFIX}-arvados-python-client" 'Curoverse, Inc.' 'python' "$arvados_python_client_version" "--url=https://arvados.org" "--description=The Arvados Python SDK" --depends "${PYTHON2_PKG_PREFIX}-setuptools" --deb-recommends=git --python-disable-dependency ruamel.yaml "${ruamel_depends[@]}"
fi
# cwl-runner
fi
test_package_presence ${PYTHON2_PKG_PREFIX}-arvados-cwl-runner "$arvados_cwl_runner_version" python "$arvados_cwl_runner_iteration"
if [[ "$?" == "0" ]]; then
- fpm_build $WORKSPACE/sdk/cwl "${PYTHON2_PKG_PREFIX}-arvados-cwl-runner" 'Curoverse, Inc.' 'python' "$arvados_cwl_runner_version" "--url=https://arvados.org" "--description=The Arvados CWL runner" --depends "${PYTHON2_PKG_PREFIX}-setuptools" --depends "${PYTHON2_PKG_PREFIX}-subprocess32 >= 3.5.0" --depends "${PYTHON2_PKG_PREFIX}-pathlib2" --depends "${PYTHON2_PKG_PREFIX}-scandir" "${iterargs[@]}"
+ fpm_build $WORKSPACE/sdk/cwl "${PYTHON2_PKG_PREFIX}-arvados-cwl-runner" 'Curoverse, Inc.' 'python' "$arvados_cwl_runner_version" "--url=https://arvados.org" "--description=The Arvados CWL runner" --depends "${PYTHON2_PKG_PREFIX}-setuptools" --depends "${PYTHON2_PKG_PREFIX}-subprocess32 >= 3.5.0" --depends "${PYTHON2_PKG_PREFIX}-pathlib2" --depends "${PYTHON2_PKG_PREFIX}-scandir" --python-disable-dependency ruamel.yaml "${ruamel_depends[@]}" "${iterargs[@]}"
fi
# schema_salad. This is a python dependency of arvados-cwl-runner,
# And for cwltool we have the same problem as for schema_salad. Ward, 2016-03-17
cwltoolversion=$(cat "$WORKSPACE/sdk/cwl/setup.py" | grep cwltool== | sed "s/.*==\(.*\)'.*/\1/")
-test_package_presence python-cwltool "$cwltoolversion" python 2
+test_package_presence python-cwltool "$cwltoolversion" python 3
if [[ "$?" == "0" ]]; then
- fpm_build cwltool "" "" python $cwltoolversion --iteration 2
+ fpm_build cwltool "" "" python $cwltoolversion --iteration 3 --python-disable-dependency ruamel.yaml "${ruamel_depends[@]}"
fi
# The PAM module
fi
declare $(format_last_commit_here "git_ts=%ct git_hash=%h")
- ARVADOS_BUILDING_VERSION="$(git describe --abbrev=0).$(date -ud "@$git_ts" +%Y%m%d%H%M%S)"
+ ARVADOS_BUILDING_VERSION="$(git tag -l |sort -V -r |head -n1).$(date -ud "@$git_ts" +%Y%m%d%H%M%S)"
echo "$ARVADOS_BUILDING_VERSION"
}
arv:WorkflowRunnerResources:
ramMin: 2048
coresMin: 2
+ keep_cache: 512
arv:ClusterTarget:
cluster_id: clsr1
project_uuid: clsr1-j7d0g-qxc4jcji7n4lafx
|_. Field |_. Type |_. Description |
|ramMin|int|RAM, in mebibytes, to reserve for the arvados-cwl-runner process. Default 1 GiB|
|coresMin|int|Number of cores to reserve to the arvados-cwl-runner process. Default 1 core.|
+|keep_cache|int|Size of the collection metadata cache for the workflow runner, in MiB. Default 256 MiB. This is added to the RAM request when determining the node size to request.|
h2(#clustertarget). arv:ClusterTarget
# apt.arvados.org
-deb http://apt.arvados.org/ jessie main
deb http://apt.arvados.org/ jessie-dev main
exit
end
-git_latest_tag = `git describe --abbrev=0`
+git_latest_tag = `git tag -l |sort -V -r |head -n1`
git_latest_tag = git_latest_tag.encode('utf-8').strip
git_timestamp, git_hash = `git log -n1 --first-parent --format=%ct:%H .`.chomp.split(":")
git_timestamp = Time.at(git_timestamp.to_i).utc
default=None,
metavar="CLUSTER_ID")
+ parser.add_argument("--collection-cache-size", type=int,
+ default=None,
+ help="Collection cache size (in MiB, default 256).")
+
parser.add_argument("--name", type=str,
help="Name to use for workflow execution instance.",
default=None)
help=argparse.SUPPRESS)
parser.add_argument("--thread-count", type=int,
- default=4, help="Number of threads to use for job submit and output collection.")
+ default=1, help="Number of threads to use for job submit and output collection.")
parser.add_argument("--http-timeout", type=int,
default=5*60, dest="http_timeout", help="API request timeout in seconds. Default is 300 seconds (5 minutes).")
type: int?
doc: Minimum cores allocated to cwl-runner
jsonldPredicate: "https://w3id.org/cwl/cwl#ResourceRequirement/coresMin"
+ keep_cache:
+ type: int?
+ doc: |
+      Size of the collection metadata cache for the workflow runner, in
+      MiB. Default 256 MiB. This is added to the RAM request when
+      determining the node size to request.
+ jsonldPredicate: "http://arvados.org/cwl#RuntimeConstraints/keep_cache"
- name: ClusterTarget
type: record
"secret_mounts": secret_mounts,
"runtime_constraints": {
"vcpus": math.ceil(self.submit_runner_cores),
- "ram": math.ceil(1024*1024 * self.submit_runner_ram),
+ "ram": 1024*1024 * (math.ceil(self.submit_runner_ram) + math.ceil(self.collection_cache_size)),
"API": True
},
"use_existing": self.enable_reuse,
# --eval-timeout is the timeout for javascript invocation
# --parallel-task-count is the number of threads to use for job submission
# --enable/disable-reuse sets desired job reuse
+ # --collection-cache-size sets aside memory to store collections
command = ["arvados-cwl-runner",
"--local",
"--api=containers",
"--disable-validate",
"--eval-timeout=%s" % self.arvrunner.eval_timeout,
"--thread-count=%s" % self.arvrunner.thread_count,
- "--enable-reuse" if self.enable_reuse else "--disable-reuse"]
+ "--enable-reuse" if self.enable_reuse else "--disable-reuse",
+ "--collection-cache-size=%s" % self.collection_cache_size]
if self.output_name:
command.append("--output-name=" + self.output_name)
def job(self, joborder, output_callback, runtimeContext):
- builder = self._init_job(joborder, runtimeContext)
+ builder = make_builder(joborder, self.hints, self.requirements, runtimeContext)
runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)
req, _ = self.get_requirement("http://arvados.org/cwl#RunInSingleContainer")
self.submit_runner_cluster = None
self.cluster_target_id = 0
self.always_submit_runner = False
+ self.collection_cache_size = 256
super(ArvRuntimeContext, self).__init__(kwargs)
from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps
from .arvtool import ArvadosCommandTool, validate_cluster_target
from .arvworkflow import ArvadosWorkflow, upload_workflow
-from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver, CollectionCache
+from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver, CollectionCache, pdh_size
from .perf import Perf
from .pathmapper import NoFollowPathMapper
from .task_queue import TaskQueue
from ._version import __version__
from cwltool.process import shortname, UnsupportedRequirement, use_custom_schema
-from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, get_listing
+from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, get_listing, visit_class
from cwltool.command_line_tool import compute_checksums
logger = logging.getLogger('arvados.cwl-runner')
arvargs.output_name = None
arvargs.output_tags = None
arvargs.thread_count = 1
+ arvargs.collection_cache_size = None
self.api = api_client
self.processes = {}
self.thread_count = arvargs.thread_count
self.poll_interval = 12
self.loadingContext = None
+ self.should_estimate_cache_size = True
if keep_client is not None:
self.keep_client = keep_client
else:
self.keep_client = arvados.keep.KeepClient(api_client=self.api, num_retries=self.num_retries)
- self.collection_cache = CollectionCache(self.api, self.keep_client, self.num_retries)
+ if arvargs.collection_cache_size:
+ collection_cache_size = arvargs.collection_cache_size*1024*1024
+ self.should_estimate_cache_size = False
+ else:
+ collection_cache_size = 256*1024*1024
+
+ self.collection_cache = CollectionCache(self.api, self.keep_client, self.num_retries,
+ cap=collection_cache_size)
self.fetcher_constructor = partial(CollectionFetcher,
api_client=self.api,
def start_run(self, runnable, runtimeContext):
- self.task_queue.add(partial(runnable.run, runtimeContext))
+ self.task_queue.add(partial(runnable.run, runtimeContext),
+ self.workflow_eval_lock, self.stop_polling)
def process_submitted(self, container):
with self.workflow_eval_lock:
with self.workflow_eval_lock:
j = self.processes[uuid]
logger.info("%s %s is %s", self.label(j), uuid, record["state"])
- self.task_queue.add(partial(j.done, record))
+ self.task_queue.add(partial(j.done, record),
+ self.workflow_eval_lock, self.stop_polling)
del self.processes[uuid]
def runtime_status_update(self, kind, message, detail=None):
if runtimeContext.priority < 1 or runtimeContext.priority > 1000:
raise Exception("--priority must be in the range 1..1000.")
+ if self.should_estimate_cache_size:
+ visited = set()
+ estimated_size = [0]
+ def estimate_collection_cache(obj):
+ if obj.get("location", "").startswith("keep:"):
+ m = pdh_size.match(obj["location"][5:])
+ if m and m.group(1) not in visited:
+ visited.add(m.group(1))
+ estimated_size[0] += int(m.group(2))
+ visit_class(job_order, ("File", "Directory"), estimate_collection_cache)
+ runtimeContext.collection_cache_size = max(((estimated_size[0]*192) / (1024*1024))+1, 256)
+ self.collection_cache.set_cap(runtimeContext.collection_cache_size*1024*1024)
+
+ logger.info("Using collection cache size %s MiB", runtimeContext.collection_cache_size)
+
runnerjob = None
if runtimeContext.submit:
# Submit a runner job to run the workflow for us.
intermediate_output_ttl=runtimeContext.intermediate_output_ttl,
merged_map=merged_map,
priority=runtimeContext.priority,
- secret_store=self.secret_store)
+ secret_store=self.secret_store,
+ collection_cache_size=runtimeContext.collection_cache_size,
+ collection_cache_is_default=self.should_estimate_cache_size)
elif self.work_api == "jobs":
runnerjob = RunnerJob(self, tool, job_order, runtimeContext.enable_reuse,
self.output_name,
else:
logger.error("Workflow is deadlocked, no runnable processes and not waiting on any pending processes.")
break
+
+ if self.stop_polling.is_set():
+ break
+
loopperf.__enter__()
loopperf.__exit__()
logger = logging.getLogger('arvados.cwl-runner')
+pdh_size = re.compile(r'([0-9a-f]{32})\+(\d+)(\+\S+)*')
+
class CollectionCache(object):
def __init__(self, api_client, keep_client, num_retries,
cap=256*1024*1024,
self.cap = cap
self.min_entries = min_entries
- def cap_cache(self):
- if self.total > self.cap:
- # ordered list iterates from oldest to newest
- for pdh, v in self.collections.items():
- if self.total < self.cap or len(self.collections) < self.min_entries:
- break
- # cut it loose
- logger.debug("Evicting collection reader %s from cache", pdh)
- del self.collections[pdh]
- self.total -= v[1]
+ def set_cap(self, cap):
+ self.cap = cap
+
+ def cap_cache(self, required):
+ # ordered dict iterates from oldest to newest
+ for pdh, v in self.collections.items():
+ available = self.cap - self.total
+ if available >= required or len(self.collections) < self.min_entries:
+ return
+ # cut it loose
+ logger.debug("Evicting collection reader %s from cache (cap %s total %s required %s)", pdh, self.cap, self.total, required)
+ del self.collections[pdh]
+ self.total -= v[1]
def get(self, pdh):
with self.lock:
if pdh not in self.collections:
+ m = pdh_size.match(pdh)
+ if m:
+ self.cap_cache(int(m.group(2)) * 128)
logger.debug("Creating collection reader for %s", pdh)
cr = arvados.collection.CollectionReader(pdh, api_client=self.api_client,
keep_client=self.keep_client,
sz = len(cr.manifest_text()) * 128
self.collections[pdh] = (cr, sz)
self.total += sz
- self.cap_cache()
else:
cr, sz = self.collections[pdh]
# bump it to the back
output_name, output_tags, submit_runner_ram=0,
name=None, on_error=None, submit_runner_image=None,
intermediate_output_ttl=0, merged_map=None,
- priority=None, secret_store=None):
+ priority=None, secret_store=None,
+ collection_cache_size=256,
+ collection_cache_is_default=True):
self.arvrunner = runner
self.tool = tool
self.job_order = job_order
self.submit_runner_cores = 1
        self.submit_runner_ram = 1024 # default 1 GiB
+ self.collection_cache_size = collection_cache_size
runner_resource_req, _ = self.tool.get_requirement("http://arvados.org/cwl#WorkflowRunnerResources")
if runner_resource_req:
self.submit_runner_cores = runner_resource_req["coresMin"]
if runner_resource_req.get("ramMin"):
self.submit_runner_ram = runner_resource_req["ramMin"]
+ if runner_resource_req.get("keep_cache") and collection_cache_is_default:
+ self.collection_cache_size = runner_resource_req["keep_cache"]
if submit_runner_ram:
# Command line / initializer overrides default and/or spec from workflow
class TaskQueue(object):
def __init__(self, lock, thread_count):
self.thread_count = thread_count
- self.task_queue = Queue.Queue()
+ self.task_queue = Queue.Queue(maxsize=self.thread_count)
self.task_queue_threads = []
self.lock = lock
self.in_flight = 0
t.start()
def task_queue_func(self):
+ while True:
+ task = self.task_queue.get()
+ if task is None:
+ return
+ try:
+ task()
+ except Exception as e:
+ logger.exception("Unhandled exception running task")
+ self.error = e
- while True:
- task = self.task_queue.get()
- if task is None:
- return
- try:
- task()
- except Exception as e:
- logger.exception("Unhandled exception running task")
- self.error = e
-
- with self.lock:
- self.in_flight -= 1
-
- def add(self, task):
- with self.lock:
- if self.thread_count > 1:
+ with self.lock:
+ self.in_flight -= 1
+
+ def add(self, task, unlock, check_done):
+ if self.thread_count > 1:
+ with self.lock:
self.in_flight += 1
- self.task_queue.put(task)
- else:
- task()
+ else:
+ task()
+ return
+
+ while True:
+ try:
+ unlock.release()
+ if check_done.is_set():
+ return
+ self.task_queue.put(task, block=True, timeout=3)
+ return
+ except Queue.Full:
+ pass
+ finally:
+ unlock.acquire()
+
def drain(self):
try:
SETUP_DIR = os.path.dirname(__file__) or '.'
def git_latest_tag():
-    gitinfo = subprocess.check_output(
-        ['git', 'describe', '--abbrev=0']).strip()
-    return str(gitinfo.decode('utf-8'))
+    # Pick the highest version tag by numeric dotted-version order; a plain
+    # lexicographic sort would mis-order e.g. "1.9" after "1.10".
+    gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+    # Ignore tags that are not purely numeric dotted versions (e.g. "v1.0",
+    # "1.0-rc1") rather than crashing on the int() conversion below.
+    gittags = [t for t in gittags if all(c.isdigit() for c in t.split(b'.'))]
+    gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')], reverse=True)
+    return str(next(iter(gittags)).decode('utf-8'))
def choose_version_from():
sdk_ts = subprocess.check_output(
from source package), leave it alone.
"""
def git_latest_tag(self):
-        gitinfo = subprocess.check_output(
-            ['git', 'describe', '--abbrev=0']).strip()
-        return str(gitinfo.decode('utf-8'))
+        # Pick the highest version tag by numeric dotted-version order; a plain
+        # lexicographic sort would mis-order e.g. "1.9" after "1.10".
+        gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+        # Ignore tags that are not purely numeric dotted versions (e.g. "v1.0",
+        # "1.0-rc1") rather than crashing on the int() conversion below.
+        gittags = [t for t in gittags if all(c.isdigit() for c in t.split(b'.'))]
+        gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')], reverse=True)
+        return str(next(iter(gittags)).decode('utf-8'))
def git_timestamp_tag(self):
gitinfo = subprocess.check_output(
'schema-salad==2.7.20181116024232',
'typing >= 3.6.4',
'ruamel.yaml >=0.15.54, <=0.15.77',
- 'arvados-python-client>=1.1.4.20180607143841',
+ 'arvados-python-client>=1.2.1.20181130020805',
'setuptools',
'ciso8601 >=1.0.6, <2.0.0',
'subprocess32>=3.5.1',
cache = CollectionCache(mock.MagicMock(), mock.MagicMock(), 4)
cr().manifest_text.return_value = 'x' * 524289
self.assertEqual(0, cache.total)
- c1 = cache.get("99999999999999999999999999999991+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+ c1 = cache.get("99999999999999999999999999999991+524289")
+ self.assertIn("99999999999999999999999999999991+524289", cache.collections)
+ self.assertNotIn("99999999999999999999999999999992+524289", cache.collections)
self.assertEqual((524289*128)*1, cache.total)
- c2 = cache.get("99999999999999999999999999999992+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertIn("99999999999999999999999999999992+99", cache.collections)
+ c2 = cache.get("99999999999999999999999999999992+524289")
+ self.assertIn("99999999999999999999999999999991+524289", cache.collections)
+ self.assertIn("99999999999999999999999999999992+524289", cache.collections)
self.assertEqual((524289*128)*2, cache.total)
- c1 = cache.get("99999999999999999999999999999991+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertIn("99999999999999999999999999999992+99", cache.collections)
+ c1 = cache.get("99999999999999999999999999999991+524289")
+ self.assertIn("99999999999999999999999999999991+524289", cache.collections)
+ self.assertIn("99999999999999999999999999999992+524289", cache.collections)
self.assertEqual((524289*128)*2, cache.total)
- c3 = cache.get("99999999999999999999999999999993+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertIn("99999999999999999999999999999992+99", cache.collections)
+ c3 = cache.get("99999999999999999999999999999993+524289")
+ self.assertIn("99999999999999999999999999999991+524289", cache.collections)
+ self.assertIn("99999999999999999999999999999992+524289", cache.collections)
self.assertEqual((524289*128)*3, cache.total)
- c4 = cache.get("99999999999999999999999999999994+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+ c4 = cache.get("99999999999999999999999999999994+524289")
+ self.assertIn("99999999999999999999999999999991+524289", cache.collections)
+ self.assertNotIn("99999999999999999999999999999992+524289", cache.collections)
self.assertEqual((524289*128)*3, cache.total)
- c5 = cache.get("99999999999999999999999999999995+99")
- self.assertNotIn("99999999999999999999999999999991+99", cache.collections)
- self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+ c5 = cache.get("99999999999999999999999999999995+524289")
+ self.assertNotIn("99999999999999999999999999999991+524289", cache.collections)
+ self.assertNotIn("99999999999999999999999999999992+524289", cache.collections)
self.assertEqual((524289*128)*3, cache.total)
cache = CollectionCache(mock.MagicMock(), mock.MagicMock(), 4)
cr().manifest_text.return_value = 'x' * 524287
self.assertEqual(0, cache.total)
- c1 = cache.get("99999999999999999999999999999991+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+ c1 = cache.get("99999999999999999999999999999991+524287")
+ self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+ self.assertNotIn("99999999999999999999999999999992+524287", cache.collections)
self.assertEqual((524287*128)*1, cache.total)
- c2 = cache.get("99999999999999999999999999999992+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertIn("99999999999999999999999999999992+99", cache.collections)
+ c2 = cache.get("99999999999999999999999999999992+524287")
+ self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+ self.assertIn("99999999999999999999999999999992+524287", cache.collections)
self.assertEqual((524287*128)*2, cache.total)
- c1 = cache.get("99999999999999999999999999999991+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertIn("99999999999999999999999999999992+99", cache.collections)
+ c1 = cache.get("99999999999999999999999999999991+524287")
+ self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+ self.assertIn("99999999999999999999999999999992+524287", cache.collections)
self.assertEqual((524287*128)*2, cache.total)
- c3 = cache.get("99999999999999999999999999999993+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertIn("99999999999999999999999999999992+99", cache.collections)
+ c3 = cache.get("99999999999999999999999999999993+524287")
+ self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+ self.assertIn("99999999999999999999999999999992+524287", cache.collections)
self.assertEqual((524287*128)*3, cache.total)
- c4 = cache.get("99999999999999999999999999999994+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertIn("99999999999999999999999999999992+99", cache.collections)
+ c4 = cache.get("99999999999999999999999999999994+524287")
+ self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+ self.assertIn("99999999999999999999999999999992+524287", cache.collections)
self.assertEqual((524287*128)*4, cache.total)
- c5 = cache.get("99999999999999999999999999999995+99")
- self.assertIn("99999999999999999999999999999991+99", cache.collections)
- self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+ c5 = cache.get("99999999999999999999999999999995+524287")
+ self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+ self.assertNotIn("99999999999999999999999999999992+524287", cache.collections)
self.assertEqual((524287*128)*4, cache.total)
- c6 = cache.get("99999999999999999999999999999996+99")
- self.assertNotIn("99999999999999999999999999999991+99", cache.collections)
- self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+ c6 = cache.get("99999999999999999999999999999996+524287")
+ self.assertNotIn("99999999999999999999999999999991+524287", cache.collections)
+ self.assertNotIn("99999999999999999999999999999992+524287", cache.collections)
self.assertEqual((524287*128)*4, cache.total)
'state': 'Committed',
'command': ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse', '--debug', '--on-error=continue',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json'],
'name': 'submit_wf.cwl',
'container_image': '999999999999999999999999999999d3+99',
'runtime_constraints': {
'API': True,
'vcpus': 1,
- 'ram': 1024*1024*1024
+ 'ram': (1024+256)*1024*1024
},
'use_existing': True,
'properties': {},
expect_container["command"] = [
'arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--disable-reuse', '--debug', '--on-error=continue',
+ '--eval-timeout=20', '--thread-count=1',
+ '--disable-reuse', "--collection-cache-size=256",
+ '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container["use_existing"] = False
expect_container["command"] = [
'arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--disable-reuse', '--debug', '--on-error=continue',
+ '--eval-timeout=20', '--thread-count=1',
+ '--disable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container["use_existing"] = False
expect_container["name"] = "submit_wf_no_reuse.cwl"
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse', '--debug', '--on-error=stop',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256",
+ '--debug', '--on-error=stop',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.api.container_requests().create.assert_called_with(
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256",
"--output-name="+output_name, '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container["output_name"] = output_name
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse', "--debug",
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256", "--debug",
"--storage-classes=foo", '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse', '--debug', '--on-error=continue',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256", '--debug',
+ '--on-error=continue',
"--intermediate-output-ttl=3600",
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse', '--debug', '--on-error=continue',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256",
+ '--debug', '--on-error=continue',
"--trash-intermediate",
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256",
"--output-tags="+output_tags, '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
logging.exception("")
expect_container = copy.deepcopy(stubs.expect_container_spec)
- expect_container["runtime_constraints"]["ram"] = 2048*1024*1024
+ expect_container["runtime_constraints"]["ram"] = (2048+256)*1024*1024
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
'container_image': '999999999999999999999999999999d3+99',
'command': ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse', '--debug', '--on-error=continue',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
'/var/lib/cwl/workflow/expect_arvworkflow.cwl#main', '/var/lib/cwl/cwl.input.json'],
'cwd': '/var/spool/cwl',
'runtime_constraints': {
'API': True,
'vcpus': 1,
- 'ram': 1073741824
+ 'ram': 1342177280
},
'use_existing': True,
'properties': {},
'container_image': "999999999999999999999999999999d3+99",
'command': ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
- '--enable-reuse', '--debug', '--on-error=continue',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json'],
'cwd': '/var/spool/cwl',
'runtime_constraints': {
'API': True,
'vcpus': 1,
- 'ram': 1073741824
+ 'ram': 1342177280
},
'use_existing': True,
'properties': {
expect_container["owner_uuid"] = project_uuid
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- "--eval-timeout=20", "--thread-count=4",
- '--enable-reuse', '--debug', '--on-error=continue',
+ "--eval-timeout=20", "--thread-count=1",
+ '--enable-reuse', "--collection-cache-size=256", '--debug',
+ '--on-error=continue',
'--project-uuid='+project_uuid,
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=60.0', '--thread-count=4',
- '--enable-reuse', '--debug', '--on-error=continue',
+ '--eval-timeout=60.0', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=256",
+ '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.api.container_requests().create.assert_called_with(
self.assertEqual(capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
+ @stubs
+ def test_submit_container_collection_cache(self, stubs):
+ project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
+ capture_stdout = cStringIO.StringIO()
+ try:
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--collection-cache-size=500",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+ self.assertEqual(exited, 0)
+ except:
+ logging.exception("")
+
+ expect_container = copy.deepcopy(stubs.expect_container_spec)
+ expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
+ '--no-log-timestamps', '--disable-validate',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=500",
+ '--debug', '--on-error=continue',
+ '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
+ expect_container["runtime_constraints"]["ram"] = (1024+500)*1024*1024
+
+ stubs.api.container_requests().create.assert_called_with(
+ body=JsonDiffMatcher(expect_container))
+ self.assertEqual(capture_stdout.getvalue(),
+ stubs.expect_container_request_uuid + '\n')
+
@stubs
def test_submit_container_thread_count(self, stubs):
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
'--eval-timeout=20', '--thread-count=20',
- '--enable-reuse', '--debug', '--on-error=continue',
+ '--enable-reuse', "--collection-cache-size=256",
+ '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.api.container_requests().create.assert_called_with(
expect_container["runtime_constraints"] = {
"API": True,
"vcpus": 2,
- "ram": 2000 * 2**20
+ "ram": (2000+512) * 2**20
}
expect_container["name"] = "submit_wf_runner_resources.cwl"
expect_container["mounts"]["/var/lib/cwl/workflow.json"]["content"]["$graph"][1]["hints"] = [
{
"class": "http://arvados.org/cwl#WorkflowRunnerResources",
"coresMin": 2,
- "ramMin": 2000
+ "ramMin": 2000,
+ "keep_cache": 512
}
]
expect_container["mounts"]["/var/lib/cwl/workflow.json"]["content"]["$graph"][0]["$namespaces"] = {
"arv": "http://arvados.org/cwl#",
}
+ expect_container['command'] = ['arvados-cwl-runner', '--local', '--api=containers',
+ '--no-log-timestamps', '--disable-validate',
+ '--eval-timeout=20', '--thread-count=1',
+ '--enable-reuse', "--collection-cache-size=512", '--debug', '--on-error=continue',
+ '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
"--no-log-timestamps",
"--disable-validate",
"--eval-timeout=20",
- '--thread-count=4',
+ '--thread-count=1',
"--enable-reuse",
+ "--collection-cache-size=256",
'--debug',
"--on-error=continue",
"/var/lib/cwl/workflow.json#main",
"properties": {},
"runtime_constraints": {
"API": True,
- "ram": 1073741824,
+ "ram": 1342177280,
"vcpus": 1
},
"secret_mounts": {
class TestTaskQueue(unittest.TestCase):
def test_tq(self):
tq = TaskQueue(threading.Lock(), 2)
+ try:
+ self.assertIsNone(tq.error)
- self.assertIsNone(tq.error)
-
- tq.add(success_task)
- tq.add(success_task)
- tq.add(success_task)
- tq.add(success_task)
+ unlock = threading.Lock()
+ unlock.acquire()
+ check_done = threading.Event()
- tq.join()
+ tq.add(success_task, unlock, check_done)
+ tq.add(success_task, unlock, check_done)
+ tq.add(success_task, unlock, check_done)
+ tq.add(success_task, unlock, check_done)
+ finally:
+ tq.join()
self.assertIsNone(tq.error)
def test_tq_error(self):
tq = TaskQueue(threading.Lock(), 2)
-
- self.assertIsNone(tq.error)
-
- tq.add(success_task)
- tq.add(success_task)
- tq.add(fail_task)
- tq.add(success_task)
-
- tq.join()
+ try:
+ self.assertIsNone(tq.error)
+
+ unlock = threading.Lock()
+ unlock.acquire()
+ check_done = threading.Event()
+
+ tq.add(success_task, unlock, check_done)
+ tq.add(success_task, unlock, check_done)
+ tq.add(fail_task, unlock, check_done)
+ tq.add(success_task, unlock, check_done)
+ finally:
+ tq.join()
self.assertIsNotNone(tq.error)
arv:WorkflowRunnerResources:
ramMin: 2000
coresMin: 2
+ keep_cache: 512
inputs:
- id: x
type: File
import re
def git_latest_tag():
- gitinfo = subprocess.check_output(
- ['git', 'describe', '--abbrev=0']).strip()
- return str(gitinfo.decode('utf-8'))
+ gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+ gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+ return str(next(iter(gittags)).decode('utf-8'))
def git_timestamp_tag():
gitinfo = subprocess.check_output(
import re
def git_latest_tag():
- gitinfo = subprocess.check_output(
- ['git', 'describe', '--abbrev=0']).strip()
- return str(gitinfo.decode('utf-8'))
+ gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+ gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+ return str(next(iter(gittags)).decode('utf-8'))
def git_timestamp_tag():
gitinfo = subprocess.check_output(
from source package), leave it alone.
"""
def git_latest_tag(self):
- gitinfo = subprocess.check_output(
- ['git', 'describe', '--abbrev=0']).strip()
- return str(gitinfo.decode('utf-8'))
+ gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+ gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+ return str(next(iter(gittags)).decode('utf-8'))
def git_timestamp_tag(self):
gitinfo = subprocess.check_output(
exit
end
-git_latest_tag = `git describe --abbrev=0`
+git_latest_tag = `git tag -l |sort -V -r |head -n1`
git_latest_tag = git_latest_tag.encode('utf-8').strip
git_timestamp, git_hash = `git log -n1 --first-parent --format=%ct:%H .`.chomp.split(":")
git_timestamp = Time.at(git_timestamp.to_i).utc
if !include_trash
if sql_table != "api_client_authorizations"
# Only include records where the owner is not trashed
- sql_conds = "NOT EXISTS(SELECT 1 FROM #{PERMISSION_VIEW} "+
- "WHERE trashed = 1 AND "+
- "(#{sql_table}.owner_uuid = target_uuid)) #{exclude_trashed_records}"
+ sql_conds = "#{sql_table}.owner_uuid NOT IN (SELECT target_uuid FROM #{PERMISSION_VIEW} "+
+ "WHERE trashed = 1) #{exclude_trashed_records}"
end
end
else
# see issue 13208 for details.
# Match a direct read permission link from the user to the record uuid
- direct_check = "EXISTS(SELECT 1 FROM #{PERMISSION_VIEW} "+
- "WHERE user_uuid IN (:user_uuids) AND perm_level >= 1 #{trashed_check} AND target_uuid = #{sql_table}.uuid)"
+ direct_check = "#{sql_table}.uuid IN (SELECT target_uuid FROM #{PERMISSION_VIEW} "+
+ "WHERE user_uuid IN (:user_uuids) AND perm_level >= 1 #{trashed_check})"
# Match a read permission link from the user to the record's owner_uuid
owner_check = ""
if sql_table != "api_client_authorizations" and sql_table != "groups" then
- owner_check = "OR EXISTS(SELECT 1 FROM #{PERMISSION_VIEW} "+
- "WHERE user_uuid IN (:user_uuids) AND perm_level >= 1 #{trashed_check} AND target_uuid = #{sql_table}.owner_uuid AND target_owner_uuid IS NOT NULL) "
+ owner_check = "OR #{sql_table}.owner_uuid IN (SELECT target_uuid FROM #{PERMISSION_VIEW} "+
+ "WHERE user_uuid IN (:user_uuids) AND perm_level >= 1 #{trashed_check} AND target_owner_uuid IS NOT NULL) "
end
links_cond = ""
cast = serialized_attributes[column] ? '::text' : ''
"coalesce(#{column}#{cast},'')"
end
- "to_tsvector('english', #{parts.join(" || ' ' || ")})"
+ "to_tsvector('english', substr(#{parts.join(" || ' ' || ")}, 0, 8000))"
end
def self.apply_filters query, filters
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+require './db/migrate/20161213172944_full_text_search_indexes'
+
+class ReplaceFullTextIndexes < ActiveRecord::Migration
+ def up
+ FullTextSearchIndexes.new.up
+ end
+
+ def down
+ end
+end
-- Name: collections_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX collections_full_text_search_idx ON public.collections USING gin (to_tsvector('english'::regconfig, (((((((((((((((((COALESCE(owner_uuid, ''::character varying))::text || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(portable_data_hash, ''::character varying))::text) || ' '::text) || (COALESCE(uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || COALESCE((properties)::text, ''::text)) || ' '::text) || COALESCE(file_names, (''::character varying)::text))));
+CREATE INDEX collections_full_text_search_idx ON public.collections USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((((COALESCE(owner_uuid, ''::character varying))::text || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(portable_data_hash, ''::character varying))::text) || ' '::text) || (COALESCE(uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || COALESCE((properties)::text, ''::text)) || ' '::text) || COALESCE(file_names, ''::text)), 0, 1000000)));
--
-- Name: container_requests_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX container_requests_full_text_search_idx ON public.container_requests USING gin (to_tsvector('english'::regconfig, (((((((((((((((((((((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(description, ''::text)) || ' '::text) || COALESCE((properties)::text, ''::text)) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || (COALESCE(requesting_container_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(container_uuid, ''::character varying))::text) || ' '::text) || COALESCE(runtime_constraints, ''::text)) || ' '::text) || (COALESCE(container_image, ''::character varying))::text) || ' '::text) || COALESCE(environment, ''::text)) || ' '::text) || (COALESCE(cwd, ''::character varying))::text) || ' '::text) || COALESCE(command, ''::text)) || ' '::text) || (COALESCE(output_path, ''::character varying))::text) || ' '::text) || COALESCE(filters, ''::text)) || ' '::text) || COALESCE(scheduling_parameters, ''::text)) || ' '::text) || (COALESCE(output_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(log_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(output_name, ''::character varying))::text)));
+CREATE INDEX container_requests_full_text_search_idx ON public.container_requests USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((((((((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(description, ''::text)) || ' '::text) || COALESCE((properties)::text, ''::text)) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || (COALESCE(requesting_container_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(container_uuid, ''::character varying))::text) || ' '::text) || COALESCE(runtime_constraints, ''::text)) || ' '::text) || (COALESCE(container_image, ''::character varying))::text) || ' '::text) || COALESCE(environment, ''::text)) || ' '::text) || (COALESCE(cwd, ''::character varying))::text) || ' '::text) || COALESCE(command, ''::text)) || ' '::text) || (COALESCE(output_path, ''::character varying))::text) || ' '::text) || COALESCE(filters, ''::text)) || ' '::text) || COALESCE(scheduling_parameters, ''::text)) || ' '::text) || (COALESCE(output_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(log_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(output_name, ''::character varying))::text), 0, 1000000)));
--
-- Name: groups_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX groups_full_text_search_idx ON public.groups USING gin (to_tsvector('english'::regconfig, (((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || (COALESCE(group_class, ''::character varying))::text) || ' '::text) || COALESCE((properties)::text, ''::text))));
+CREATE INDEX groups_full_text_search_idx ON public.groups USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || (COALESCE(group_class, ''::character varying))::text) || ' '::text) || COALESCE((properties)::text, ''::text)), 0, 1000000)));
--
-- Name: jobs_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX jobs_full_text_search_idx ON public.jobs USING gin (to_tsvector('english'::regconfig, (((((((((((((((((((((((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(submit_id, ''::character varying))::text) || ' '::text) || (COALESCE(script, ''::character varying))::text) || ' '::text) || (COALESCE(script_version, ''::character varying))::text) || ' '::text) || COALESCE(script_parameters, ''::text)) || ' '::text) || (COALESCE(cancelled_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(cancelled_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(output, ''::character varying))::text) || ' '::text) || (COALESCE(is_locked_by_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(log, ''::character varying))::text) || ' '::text) || COALESCE(tasks_summary, ''::text)) || ' '::text) || COALESCE(runtime_constraints, ''::text)) || ' '::text) || (COALESCE(repository, ''::character varying))::text) || ' '::text) || (COALESCE(supplied_script_version, ''::character varying))::text) || ' '::text) || (COALESCE(docker_image_locator, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || (COALESCE(arvados_sdk_version, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text))));
+CREATE INDEX jobs_full_text_search_idx ON public.jobs USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((((((((((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(submit_id, ''::character varying))::text) || ' '::text) || (COALESCE(script, ''::character varying))::text) || ' '::text) || (COALESCE(script_version, ''::character varying))::text) || ' '::text) || COALESCE(script_parameters, ''::text)) || ' '::text) || (COALESCE(cancelled_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(cancelled_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(output, ''::character varying))::text) || ' '::text) || (COALESCE(is_locked_by_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(log, ''::character varying))::text) || ' '::text) || COALESCE(tasks_summary, ''::text)) || ' '::text) || COALESCE(runtime_constraints, ''::text)) || ' '::text) || (COALESCE(repository, ''::character varying))::text) || ' '::text) || (COALESCE(supplied_script_version, ''::character varying))::text) || ' '::text) || (COALESCE(docker_image_locator, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || (COALESCE(arvados_sdk_version, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)), 0, 1000000)));
--
-- Name: pipeline_instances_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX pipeline_instances_full_text_search_idx ON public.pipeline_instances USING gin (to_tsvector('english'::regconfig, (((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(pipeline_template_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)) || ' '::text) || COALESCE(properties, ''::text)) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || COALESCE(components_summary, ''::text)) || ' '::text) || (COALESCE(description, ''::character varying))::text)));
+CREATE INDEX pipeline_instances_full_text_search_idx ON public.pipeline_instances USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(pipeline_template_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)) || ' '::text) || COALESCE(properties, ''::text)) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || COALESCE(components_summary, ''::text)) || ' '::text) || (COALESCE(description, ''::character varying))::text), 0, 1000000)));
--
-- Name: pipeline_templates_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX pipeline_templates_full_text_search_idx ON public.pipeline_templates USING gin (to_tsvector('english'::regconfig, (((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)) || ' '::text) || (COALESCE(description, ''::character varying))::text)));
+CREATE INDEX pipeline_templates_full_text_search_idx ON public.pipeline_templates USING gin (to_tsvector('english'::regconfig, substr((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)) || ' '::text) || (COALESCE(description, ''::character varying))::text), 0, 1000000)));
--
-- Name: workflows_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX workflows_full_text_search_idx ON public.workflows USING gin (to_tsvector('english'::regconfig, (((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(description, ''::text))));
+CREATE INDEX workflows_full_text_search_idx ON public.workflows USING gin (to_tsvector('english'::regconfig, substr((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(description, ''::text)), 0, 1000000)));
--
INSERT INTO schema_migrations (version) VALUES ('20180915155335');
+INSERT INTO schema_migrations (version) VALUES ('20180917200000');
+
INSERT INTO schema_migrations (version) VALUES ('20180917205609');
INSERT INTO schema_migrations (version) VALUES ('20180919001158');
import re
def git_latest_tag():
- gitinfo = subprocess.check_output(
- ['git', 'describe', '--abbrev=0']).strip()
- return str(gitinfo.decode('utf-8'))
+ gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+ gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+ return str(next(iter(gittags)).decode('utf-8'))
def git_timestamp_tag():
gitinfo = subprocess.check_output(
import re
def git_latest_tag():
- gitinfo = subprocess.check_output(
- ['git', 'describe', '--abbrev=0']).strip()
- return str(gitinfo.decode('utf-8'))
+ gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+ gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+ return str(next(iter(gittags)).decode('utf-8'))
def git_timestamp_tag():
gitinfo = subprocess.check_output(
exit
end
-git_latest_tag = `git describe --abbrev=0`
+git_latest_tag = `git tag -l |sort -V -r |head -n1`
git_latest_tag = git_latest_tag.encode('utf-8').strip
git_timestamp, git_hash = `git log -n1 --first-parent --format=%ct:%H .`.chomp.split(":")
git_timestamp = Time.at(git_timestamp.to_i).utc
import re
def git_latest_tag():
- gitinfo = subprocess.check_output(
- ['git', 'describe', '--abbrev=0']).strip()
- return str(gitinfo.decode('utf-8'))
+ gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+ gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+ return str(next(iter(gittags)).decode('utf-8'))
def git_timestamp_tag():
gitinfo = subprocess.check_output(
--- /dev/null
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQINBFWln24BEADrBl5p99uKh8+rpvqJ48u4eTtjeXAWbslJotmC/CakbNSqOb9o
+ddfzRvGVeJVERt/Q/mlvEqgnyTQy+e6oEYN2Y2kqXceUhXagThnqCoxcEJ3+KM4R
+mYdoe/BJ/J/6rHOjq7Omk24z2qB3RU1uAv57iY5VGw5p45uZB4C4pNNsBJXoCvPn
+TGAs/7IrekFZDDgVraPx/hdiwopQ8NltSfZCyu/jPpWFK28TR8yfVlzYFwibj5WK
+dHM7ZTqlA1tHIG+agyPf3Rae0jPMsHR6q+arXVwMccyOi+ULU0z8mHUJ3iEMIrpT
+X+80KaN/ZjibfsBOCjcfiJSB/acn4nxQQgNZigna32velafhQivsNREFeJpzENiG
+HOoyC6qVeOgKrRiKxzymj0FIMLru/iFF5pSWcBQB7PYlt8J0G80lAcPr6VCiN+4c
+NKv03SdvA69dCOj79PuO9IIvQsJXsSq96HB+TeEmmL+xSdpGtGdCJHHM1fDeCqkZ
+hT+RtBGQL2SEdWjxbF43oQopocT8cHvyX6Zaltn0svoGs+wX3Z/H6/8P5anog43U
+65c0A+64Jj00rNDr8j31izhtQMRo892kGeQAaaxg4Pz6HnS7hRC+cOMHUU4HA7iM
+zHrouAdYeTZeZEQOA7SxtCME9ZnGwe2grxPXh/U/80WJGkzLFNcTKdv+rwARAQAB
+tDdEb2NrZXIgUmVsZWFzZSBUb29sIChyZWxlYXNlZG9ja2VyKSA8ZG9ja2VyQGRv
+Y2tlci5jb20+iQGcBBABCgAGBQJaJYMKAAoJENNu5NUL+WcWfQML/RjicnhN0G28
++Hj3icn/SHYXg8VTHMX7aAuuClZh7GoXlvVlyN0cfRHTcFPkhv1LJ5/zFVwJxlIc
+xX0DlWbv5zlPQQQfNYH7mGCt3OS0QJGDpCM9Q6iw1EqC0CdtBDIZMGn7s9pnuq5C
+3kzer097BltvuXWI+BRMvVad2dhzuOQi76jyxhprTUL6Xwm7ytNSja5Xyigfc8HF
+rXhlQxnMEpWpTttY+En1SaTgGg7/4yB9jG7UqtdaVuAvWI69V+qzJcvgW6do5XwH
+b/5waezxOU033stXcRCYkhEenm+mXzcJYXt2avg1BYIQsZuubCBlpPtZkgWWLOf+
+eQR1Qcy9IdWQsfpH8DX6cEbeiC0xMImcuufI5KDHZQk7E7q8SDbDbk5Dam+2tRef
+eTB2A+MybVQnpsgCvEBNQ2TfcWsZ6uLHMBhesx/+rmyOnpJDTvvCLlkOMTUNPISf
+GJI0IHZFHUJ/+/uRfgIzG6dSqxQ0zHXOwGg4GbhjpQ5I+5Eg2BNRkYkCHAQQAQoA
+BgUCVsO73QAKCRBcs2HlUvsNEB8rD/4t+5uEsqDglXJ8m5dfL88ARHKeFQkW17x7
+zl7ctYHHFSFfP2iajSoAVfe5WN766TsoiHgfBE0HoLK8RRO7fxs9K7Czm6nyxB3Z
+p+YgSUZIS3wqc43jp8gd2dCCQelKIDv5rEFWHuQlyZersK9AJqIggS61ZQwJLcVY
+fUVnIdJdCmUV9haR7vIfrjNP88kqiInZWHy2t8uaB7HFPpxlNYuiJsA0w98rGQuY
+6fWlX71JnBEsgG+L73XAB0fm14QP0VvEB3njBZYlsO2do2B8rh5g51htslK5wqgC
+U61lfjnykSM8yRQbOHvPK7uYdmSF3UXqcP/gjmI9+C8s8UdnMa9rv8b8cFwpEjHu
+xeCmQKYQ/tcLOtRYZ1DIvzxETGH0xbrz6wpKuIMgY7d3xaWdjUf3ylvO0DnlXJ9Y
+r15fYndzDLPSlybIO0GrE+5grHntlSBbMa5BUNozaQ/iQBEUZ/RY+AKxy+U28JJB
+W2Wb0oun6+YdhmwgFyBoSFyp446Kz2P2A1+l/AGhzltc25Vsvwha+lRZfet464yY
+GoNBurTbQWS63JWYFoTkKXmWeS2789mQOQqka3wFXMDzVtXzmxSEbaler7lZbhTj
+wjAAJzp6kdNsPbde4lUIzt6FTdJm0Ivb47hMV4dWKEnFXrYjui0ppUH1RFUU6hyz
+IF8kfxDKO4kCHAQQAQoABgUCV0lgZQAKCRBcs2HlUvsNEHh9EACOm7QH2MGD7gI3
+0VMvapZz4Wfsbda58LFM7G5qPCt10zYfpf0dPJ7tHbHM8N9ENcI7tvH4dTfGsttt
+/uvX9PsiAml6kdfAGxoBRil+76NIHxFWsXSLVDd3hzcnRhc5njimwJa8SDBAp0kx
+v05BVWDvTbZb/b0jdgbqZk2oE0RK8S2Sp1bFkc6fl3pcJYFOQQmelOmXvPmyHOhd
+W2bLX9e1/IulzVf6zgi8dsj9IZ9aLKJY6Cz6VvJ85ML6mLGGwgNvJTLdWqntFFr0
+QqkdM8ZSp9ezWUKo28XGoxDAmo6ENNTLIZjuRlnj1Yr9mmwmf4mgucyqlU93XjCR
+y6u5bpuqoQONRPYCR/UKKk/qoGnYXnhX6AtUD+3JHvrV5mINkd/ad5eR5pviUGz+
+H/VeZqVhMbxxgkm3Gra9+bZ2pCCWboKtqIM7JtXYwks/dttkV5fTqBarJtWzcwO/
+Pv3DreTdnMoVNGzNk/84IeNmGww/iQ1Px0psVCKVPsKxr2RjNhVP7qdA0cTguFNX
+y+hx5Y/JYjSVnxIN74aLoDoeuoBhfYpOY+HiJTaM+pbLfoJr5WUPf/YUQ3qBvgG4
+WXiJUOAgsPmNY//n1MSMyhz1SvmhSXfqCVTb26IyVv0oA3UjLRcKjr18mHB5d9Fr
+NIGVHg8gJjRmXid5BZJZwKQ5niivjokCIgQQAQoADAUCV3uc0wWDB4YfgAAKCRAx
+uBWjAQZ0qe2DEACaq16AaJ2QKtOweqlGk92gQoJ2OCbIW15hW/1660u+X+2CQz8d
+nySXaq22AyBx4Do88b6d54D6TqScyObGJpGroHqAjvyh7v/t/V6oEwe34Ls2qUX2
+77lqfqsz3B0nW/aKZ2oH8ygM3tw0J5y4sAj5bMrxqcwuCs14Fds3v+K2mjsntZCu
+ztHB8mqZp/6v00d0vGGqcl6uVaS04cCQMNUkQ7tGMXlyAEIiH2ksU+/RJLaIqFtg
+klfP3Y7foAY15ymCSQPD9c81+xjbf0XNmBtDreL+rQVtesahU4Pp+Sc23iuXGdY2
+yF13wnGmScojNjM2BoUiffhFeyWBdOTgCFhOEhk0Y1zKrkNqDC0sDAj0B5vhQg/T
+10NLR2MerSk9+MJLHZqFrHXo5f59zUvte/JhtViP5TdO/Yd4ptoEcDspDKLv0FrN
+7xsP8Q6DmBz1doCe06PQS1Z1Sv4UToHRS2RXskUnDc8Cpuex5mDBQO+LV+tNToh4
+ZNcpj9lFHNuaA1qS15X3EVCySZaPyn2WRd6ZisCKtwopRmshVItTTcLmrxu+hHAF
+bVRVFRRSCE8JIZLkWwRyMrcxB2KLBYA+f2nCtD2rqiZ8K8Cr9J1qt2iu5yogCwA/
+ombzzYxWWrt/wD6ixJr5kZwBJZroHB7FkRBcTDIzDFYGBYmClACTvLuOnokCIgQS
+AQoADAUCWKy8/gWDB4YfgAAKCRAkW0txwCm5FmrGD/9lL31LQtn5wxwoZvfEKuMh
+KRw0FDUq59lQpqyMxp7lrZozFUqlH4MLTeEWbFle+R+UbUoVkBnZ/cSvVGwtRVaH
+wUeP9NAqBLtIqt4S0T2T0MW6Ug0DVH7V7uYuFktpv1xmIzcC4gV+LHhp95SPYbWr
+uVMi6ENIMZoEqW9uHOy6n2/nh76dR2NVJiZHt5LbG8YXM/Y+z3XsIenwKQ97YO7x
+yEaM7UdsQSqKVB0isTQXT2wxoA/pDvSyu7jpElD5dOtPPz3r0fQpcQKrq0IMjgcB
+u5X5tQ5uktmmdaAvIwLibUB9A+htFiFP4irSx//Lkn66RLjrSqwtMCsv7wbPvTfc
+fdpcmkR767t1VvjQWj9DBfOMjGJk9eiLkUSHYyQst6ELyVdutAIHRV2GQqfEKJzc
+cD3wKdbaOoABqRVr/ok5Oj0YKSrvk0lW3l8vS/TZXvQppSMdJuaTR8JDy6dGuoKt
+uyFDb0fKf1JU3+Gj3Yy2YEfqX0MjNQsck9pDV647UXXdzF9uh3cYVfPbl+xBYOU9
+d9qRcqMut50AVIxpUepGa4Iw7yOSRPCnPAMNAPSmAdJTaQcRWcUd9LOaZH+ZFLJZ
+mpbvS//jQpoBt++Ir8wl9ZJXICRJcvrQuhCjOSNLFzsNr/wyVLnGwmTjLWoJEA0p
+c0cYtLW6fSGknkvNA7e8LYkCMwQQAQgAHRYhBFI9KC2HD6c70cN9svEo88fgKodF
+BQJZ76NPAAoJEPEo88fgKodFYXwP+wW6F7UpNmKXaddu+aamLTe3uv8OSKUHQbRh
+By1oxfINI7iC+BZl9ycJip0S08JH0F+RZsi1H24+GcP9vGTDgu3z0NcOOD4mPpzM
+jSi2/hbGzh9C84pxRJVLAKrbqCz7YQ6JdNG4RUHW/r0QgKTnTlvikVx7n9QaPrVl
+PsVFU3xv5oQxUHpwNWyvpPGTDiycuaGKekodYhZ0vKzJzfyyaUTgfxvTVVj10jyi
+f+mSfY8YBHhDesgYF1d2CUEPth9z5KC/eDgY7KoWs8ZK6sVL3+tGrnqK/s6jqcsk
+J7Kt4c3k0jU56rUo8+jnu9yUHcBXAjtr1Vz/nwVfqmPzukIF1ZkMqdQqIRtvDyEC
+16yGngMpWEVM3/vIsi2/uUMuGvjEkEmqs2oLK1hf+Y0W6Avq+9fZUQUEk0e4wbpu
+RCqX5OjeQTEEXmAzoMsdAiwFvr1ul+eI/BPy+29OQ77hz3/dotdYYfs1JVkiFUhf
+PJwvpoUOXiA5V56wl3i5tkbRSLRSkLmiLTlCEfClHEK/wwLU4ZKuD5UpW8xL438l
+/Ycnsl7aumnofWoaEREBc1Xbnx9SZbrTT8VctW8XpMVIPxCwJCp/LqHtyEbnptnD
+7QoHtdWexFmQFUIlGaDiaL7nv0BD6RA/HwhVSxU3b3deKDYNpG9QnAzte8KXA9/s
+ejP18gCKiQI4BBMBAgAiBQJVpZ9uAhsvBgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIX
+gAAKCRD3YiFXLFJgnbRfEAC9Uai7Rv20QIDlDogRzd+Vebg4ahyoUdj0CH+nAk40
+RIoq6G26u1e+sdgjpCa8jF6vrx+smpgd1HeJdmpahUX0XN3X9f9qU9oj9A4I1WDa
+lRWJh+tP5WNv2ySy6AwcP9QnjuBMRTnTK27pk1sEMg9oJHK5p+ts8hlSC4SluyMK
+H5NMVy9c+A9yqq9NF6M6d6/ehKfBFFLG9BX+XLBATvf1ZemGVHQusCQebTGv0C0V
+9yqtdPdRWVIEhHxyNHATaVYOafTj/EF0lDxLl6zDT6trRV5n9F1VCEh4Aal8L5Mx
+VPcIZVO7NHT2EkQgn8CvWjV3oKl2GopZF8V4XdJRl90U/WDv/6cmfI08GkzDYBHh
+S8ULWRFwGKobsSTyIvnbk4NtKdnTGyTJCQ8+6i52s+C54PiNgfj2ieNn6oOR7d+b
+NCcG1CdOYY+ZXVOcsjl73UYvtJrO0Rl/NpYERkZ5d/tzw4jZ6FCXgggA/Zxcjk6Y
+1ZvIm8Mt8wLRFH9Nww+FVsCtaCXJLP8DlJLASMD9rl5QS9Ku3u7ZNrr5HWXPHXIT
+X660jglyshch6CWeiUATqjIAzkEQom/kEnOrvJAtkypRJ59vYQOedZ1sFVELMXg2
+UCkD/FwojfnVtjzYaTCeGwFQeqzHmM241iuOmBYPeyTY5veF49aBJA1gEJOQTvBR
+8YkCOQQRAQgAIxYhBDlHZ/sRadXUayJzU3Es9wyw8WURBQJaajQrBYMHhh+AAAoJ
+EHEs9wyw8WURDyEP/iD903EcaiZP68IqUBsdHMxOaxnKZD9H2RTBaTjR6r9UjCOf
+bomXpVzL0dMZw1nHIE7u2VT++5wk+QvcN7epBgOWUb6tNcv3nI3vqMGRR+fKW15V
+J1sUwMOKGC4vlbLRVRWd2bb+oPZWeteOxNIqu/8DHDFHg3LtoYxWbrMYHhvd0ben
+B9GvwoqeBaqAeERKYCEoPZRB5O6ZHccX2HacjwFs4uYvIoRg4WI+ODXVHXCgOVZq
+yRuVAuQUjwkLbKL1vxJ01EWzWwRI6cY9mngFXNTHEkoxNyjzlfpn/YWheRiwpwg+
+ymDL4oj1KHNq06zNl38dZCd0rde3OFNuF904H6D+reYL50YA9lkL9mRtlaiYyo1J
+SOOjdr+qxuelfbLgDSeM75YVSiYiZZO8DWr2Cq/SNp47z4T4Il/yhQ6eAstZOIkF
+KQlBjr+ZtLdUu67sPdgPoT842IwSrRTrirEUd6cyADbRggPHrOoYEooBCrCgDYCM
+K1xxG9f6Q42yvL1zWKollibsvJF8MVwgkWfJJyhLYylmJ8osvX9LNdCJZErVrRTz
+wAM00crp/KIiIDCREEgE+5BiuGdM70gSuy3JXSs78JHA4l2tu1mDBrMxNR+C8lpj
+1pnLFHTfGYwHQSwKm42/JZqbePh6LKblUdS5Np1dl0tk5DDHBluRzhx16H7E
+=lwu7
+-----END PGP PUBLIC KEY BLOCK-----
VOLUME /var/log/nginx
VOLUME /etc/ssl/private
-RUN apt-key adv --no-tty --keyserver hkp://pool.sks-keyservers.net:80 --recv-keys 58118E89F3A912897C070ADBF76221572C52609D || \
- apt-key adv --no-tty --keyserver hkp://pgp.mit.edu:80 --recv-keys 58118E89F3A912897C070ADBF76221572C52609D
+ADD 58118E89F3A912897C070ADBF76221572C52609D.asc /tmp/
+RUN apt-key add --no-tty /tmp/58118E89F3A912897C070ADBF76221572C52609D.asc && \
+ rm -f /tmp/58118E89F3A912897C070ADBF76221572C52609D.asc
RUN mkdir -p /etc/apt/sources.list.d && \
echo deb https://apt.dockerproject.org/repo debian-stretch main > /etc/apt/sources.list.d/docker.list && \
import re
def git_latest_tag():
- gitinfo = subprocess.check_output(
- ['git', 'describe', '--abbrev=0']).strip()
- return str(gitinfo.decode('utf-8'))
+ gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+ gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+ return str(next(iter(gittags)).decode('utf-8'))
def git_timestamp_tag():
gitinfo = subprocess.check_output(