Merge branch '14551-wb-login-as-token'
author Lucas Di Pentima <ldipentima@veritasgenetics.com>
Tue, 4 Dec 2018 20:51:13 +0000 (17:51 -0300)
committer Lucas Di Pentima <ldipentima@veritasgenetics.com>
Tue, 4 Dec 2018 20:51:13 +0000 (17:51 -0300)
Closes #14551

Arvados-DCO-1.1-Signed-off-by: Lucas Di Pentima <ldipentima@veritasgenetics.com>

39 files changed:
.licenseignore
apps/workbench/app/controllers/work_units_controller.rb
build/package-testing/test-package-python27-python-arvados-cwl-runner.sh
build/run-build-docker-jobs-image.sh
build/run-build-packages.sh
build/run-library.sh
doc/user/cwl/cwl-extensions.html.textile.liquid
docker/jobs/apt.arvados.org.list
sdk/cli/arvados-cli.gemspec
sdk/cwl/arvados_cwl/__init__.py
sdk/cwl/arvados_cwl/arv-cwl-schema.yml
sdk/cwl/arvados_cwl/arvcontainer.py
sdk/cwl/arvados_cwl/arvworkflow.py
sdk/cwl/arvados_cwl/context.py
sdk/cwl/arvados_cwl/executor.py
sdk/cwl/arvados_cwl/fsaccess.py
sdk/cwl/arvados_cwl/runner.py
sdk/cwl/arvados_cwl/task_queue.py
sdk/cwl/arvados_version.py
sdk/cwl/gittaggers.py
sdk/cwl/setup.py
sdk/cwl/tests/test_fsaccess.py
sdk/cwl/tests/test_submit.py
sdk/cwl/tests/test_tq.py
sdk/cwl/tests/wf/submit_wf_runner_resources.cwl
sdk/pam/arvados_version.py
sdk/python/arvados_version.py
sdk/python/gittaggers.py
sdk/ruby/arvados.gemspec
services/api/app/models/arvados_model.rb
services/api/db/migrate/20180917200000_replace_full_text_indexes.rb [new file with mode: 0644]
services/api/db/structure.sql
services/dockercleaner/arvados_version.py
services/fuse/arvados_version.py
services/login-sync/arvados-login-sync.gemspec
services/nodemanager/arvados_version.py
tools/arvbox/lib/arvbox/docker/58118E89F3A912897C070ADBF76221572C52609D.asc [new file with mode: 0644]
tools/arvbox/lib/arvbox/docker/Dockerfile.base
tools/crunchstat-summary/arvados_version.py

diff --git a/.licenseignore b/.licenseignore
index ba4c2dc38bec8b7c233ae4560c45de281b47a52e..83c81b2fc21ca1db86206d82c2f50cae297b9b9c 100644 (file)
@@ -71,3 +71,4 @@ sdk/R/NAMESPACE
 sdk/R/.Rbuildignore
 sdk/R/ArvadosR.Rproj
 *.Rd
+*.asc
diff --git a/apps/workbench/app/controllers/work_units_controller.rb b/apps/workbench/app/controllers/work_units_controller.rb
index 8527b4d48cb717b941ab376b68255e917c5797a3..767762c81e3cd3d899bda0b3bce873cc97c390b9 100644 (file)
@@ -85,12 +85,6 @@ class WorkUnitsController < ApplicationController
       attrs['state'] = "Uncommitted"
 
       # required
-      attrs['command'] = ["arvados-cwl-runner",
-                          "--local",
-                          "--api=containers",
-                          "--project-uuid=#{params['work_unit']['owner_uuid']}",
-                          "/var/lib/cwl/workflow.json#main",
-                          "/var/lib/cwl/cwl.input.json"]
       attrs['container_image'] = "arvados/jobs"
       attrs['cwd'] = "/var/spool/cwl"
       attrs['output_path'] = "/var/spool/cwl"
@@ -102,6 +96,7 @@ class WorkUnitsController < ApplicationController
         "API" => true
       }
 
+      keep_cache = 256
       input_defaults = {}
       if wf_json
         main = get_cwl_main(wf_json)
@@ -119,11 +114,22 @@ class WorkUnitsController < ApplicationController
               if hint[:ramMin]
                 runtime_constraints["ram"] = hint[:ramMin] * 1024 * 1024
               end
+              if hint[:keep_cache]
+                keep_cache = hint[:keep_cache]
+              end
             end
           end
         end
       end
 
+      attrs['command'] = ["arvados-cwl-runner",
+                          "--local",
+                          "--api=containers",
+                          "--project-uuid=#{params['work_unit']['owner_uuid']}",
+                          "--collection-keep-cache=#{keep_cache}",
+                          "/var/lib/cwl/workflow.json#main",
+                          "/var/lib/cwl/cwl.input.json"]
+
       # mounts
       mounts = {
         "/var/lib/cwl/cwl.input.json" => {
diff --git a/build/package-testing/test-package-python27-python-arvados-cwl-runner.sh b/build/package-testing/test-package-python27-python-arvados-cwl-runner.sh
index 0274c8f45ea520eaeb0e4aa453aaaba92f09d984..e499238d89eb2572af6beb6f9d9a05bce1dd8b31 100755 (executable)
@@ -3,6 +3,10 @@
 #
 # SPDX-License-Identifier: AGPL-3.0
 
+set -e
+
+arvados-cwl-runner --version
+
 exec python <<EOF
 import arvados_cwl
 print "arvados-cwl-runner version", arvados_cwl.__version__
diff --git a/build/run-build-docker-jobs-image.sh b/build/run-build-docker-jobs-image.sh
index b1e99fc66b27c40d3ac13542e6f7de76ba37e202..83bb5ae7165cb2cfd11879e85411253472887b26 100755 (executable)
@@ -118,6 +118,11 @@ timer_reset
 # clean up the docker build environment
 cd "$WORKSPACE"
 
+if [[ -z "$ARVADOS_BUILDING_VERSION" ]] && ! [[ -z "$version_tag" ]]; then
+       ARVADOS_BUILDING_VERSION="$version_tag"
+       ARVADOS_BUILDING_ITERATION="1"
+fi
+
 python_sdk_ts=$(cd sdk/python && timestamp_from_git)
 cwl_runner_ts=$(cd sdk/cwl && timestamp_from_git)
 
@@ -130,11 +135,25 @@ fi
 
 echo cwl_runner_version $cwl_runner_version python_sdk_version $python_sdk_version
 
+if [[ "${python_sdk_version}" != "${ARVADOS_BUILDING_VERSION}" ]]; then
+       python_sdk_version="${python_sdk_version}-2"
+else
+       python_sdk_version="${ARVADOS_BUILDING_VERSION}-${ARVADOS_BUILDING_ITERATION}"
+fi
+
+cwl_runner_version_orig=$cwl_runner_version
+
+if [[ "${cwl_runner_version}" != "${ARVADOS_BUILDING_VERSION}" ]]; then
+       cwl_runner_version="${cwl_runner_version}-4"
+else
+       cwl_runner_version="${ARVADOS_BUILDING_VERSION}-${ARVADOS_BUILDING_ITERATION}"
+fi
+
 cd docker/jobs
 docker build $NOCACHE \
-       --build-arg python_sdk_version=${python_sdk_version}-2 \
-       --build-arg cwl_runner_version=${cwl_runner_version}-4 \
-       -t arvados/jobs:$cwl_runner_version .
+       --build-arg python_sdk_version=${python_sdk_version} \
+       --build-arg cwl_runner_version=${cwl_runner_version} \
+       -t arvados/jobs:$cwl_runner_version_orig .
 
 ECODE=$?
 
@@ -157,9 +176,9 @@ if docker --version |grep " 1\.[0-9]\." ; then
     FORCE=-f
 fi
 if ! [[ -z "$version_tag" ]]; then
-    docker tag $FORCE arvados/jobs:$cwl_runner_version arvados/jobs:"$version_tag"
+    docker tag $FORCE arvados/jobs:$cwl_runner_version_orig arvados/jobs:"$version_tag"
 else
-    docker tag $FORCE arvados/jobs:$cwl_runner_version arvados/jobs:latest
+    docker tag $FORCE arvados/jobs:$cwl_runner_version_orig arvados/jobs:latest
 fi
 
 ECODE=$?
@@ -185,7 +204,7 @@ else
         if ! [[ -z "$version_tag" ]]; then
             docker_push arvados/jobs:"$version_tag"
         else
-           docker_push arvados/jobs:$cwl_runner_version
+           docker_push arvados/jobs:$cwl_runner_version_orig
            docker_push arvados/jobs:latest
         fi
         title "upload arvados images finished (`timer`)"
diff --git a/build/run-build-packages.sh b/build/run-build-packages.sh
index fe01e4b2ef2528222dfff7d097729b9b5c773b30..73e2e628631bbc71ea84388b37e6ae4d60917734 100755 (executable)
@@ -327,6 +327,28 @@ package_go_binary tools/keep-rsync keep-rsync \
 package_go_binary tools/keep-exercise keep-exercise \
     "Performance testing tool for Arvados Keep"
 
+
+# we need explicit debian_revision values in the dependencies for ruamel.yaml, because we have a package iteration
+# greater than zero. So we parse setup.py, get the ruamel.yaml dependencies, tell fpm not to automatically include
+# them in the package being built, and re-add them manually with an appropriate debian_revision value.
+# See #14552 for the reason for this (nasty) workaround. We use ${ruamel_depends[@]} in a few places further down
+# in this script.
+# Ward, 2018-11-28
+IFS=', ' read -r -a deps <<< `grep ruamel.yaml $WORKSPACE/sdk/python/setup.py |cut -f 3 -dl |sed -e "s/'//g"`
+declare -a ruamel_depends=()
+for i in ${deps[@]}; do
+  i=`echo "$i" | sed -e 's!\([0-9]\)! \1!'`
+  if [[ $i =~ .*\>.* ]]; then
+    ruamel_depends+=(--depends "python-ruamel.yaml $i-1")
+  elif [[ $i =~ .*\<.* ]]; then
+    ruamel_depends+=(--depends "python-ruamel.yaml $i-9")
+  else
+    echo "Encountered ruamel dependency that I can't parse. Aborting..."
+    exit 1
+  fi
+done
+
+
 # The Python SDK
 # Please resist the temptation to add --no-python-fix-name to the fpm call here
 # (which would remove the python- prefix from the package name), because this
@@ -339,7 +361,8 @@ rm -rf "$WORKSPACE/sdk/python/build"
 arvados_python_client_version=${ARVADOS_BUILDING_VERSION:-$(awk '($1 == "Version:"){print $2}' $WORKSPACE/sdk/python/arvados_python_client.egg-info/PKG-INFO)}
 test_package_presence ${PYTHON2_PKG_PREFIX}-arvados-python-client "$arvados_python_client_version" python
 if [[ "$?" == "0" ]]; then
-  fpm_build $WORKSPACE/sdk/python "${PYTHON2_PKG_PREFIX}-arvados-python-client" 'Curoverse, Inc.' 'python' "$arvados_python_client_version" "--url=https://arvados.org" "--description=The Arvados Python SDK" --depends "${PYTHON2_PKG_PREFIX}-setuptools" --deb-recommends=git
+
+  fpm_build $WORKSPACE/sdk/python "${PYTHON2_PKG_PREFIX}-arvados-python-client" 'Curoverse, Inc.' 'python' "$arvados_python_client_version" "--url=https://arvados.org" "--description=The Arvados Python SDK" --depends "${PYTHON2_PKG_PREFIX}-setuptools" --deb-recommends=git  --python-disable-dependency ruamel.yaml "${ruamel_depends[@]}"
 fi
 
 # cwl-runner
@@ -355,7 +378,7 @@ else
 fi
 test_package_presence ${PYTHON2_PKG_PREFIX}-arvados-cwl-runner "$arvados_cwl_runner_version" python "$arvados_cwl_runner_iteration"
 if [[ "$?" == "0" ]]; then
-  fpm_build $WORKSPACE/sdk/cwl "${PYTHON2_PKG_PREFIX}-arvados-cwl-runner" 'Curoverse, Inc.' 'python' "$arvados_cwl_runner_version" "--url=https://arvados.org" "--description=The Arvados CWL runner" --depends "${PYTHON2_PKG_PREFIX}-setuptools" --depends "${PYTHON2_PKG_PREFIX}-subprocess32 >= 3.5.0" --depends "${PYTHON2_PKG_PREFIX}-pathlib2" --depends "${PYTHON2_PKG_PREFIX}-scandir" "${iterargs[@]}"
+  fpm_build $WORKSPACE/sdk/cwl "${PYTHON2_PKG_PREFIX}-arvados-cwl-runner" 'Curoverse, Inc.' 'python' "$arvados_cwl_runner_version" "--url=https://arvados.org" "--description=The Arvados CWL runner" --depends "${PYTHON2_PKG_PREFIX}-setuptools" --depends "${PYTHON2_PKG_PREFIX}-subprocess32 >= 3.5.0" --depends "${PYTHON2_PKG_PREFIX}-pathlib2" --depends "${PYTHON2_PKG_PREFIX}-scandir" --python-disable-dependency ruamel.yaml "${ruamel_depends[@]}" "${iterargs[@]}"
 fi
 
 # schema_salad. This is a python dependency of arvados-cwl-runner,
@@ -381,9 +404,9 @@ fi
 
 # And for cwltool we have the same problem as for schema_salad. Ward, 2016-03-17
 cwltoolversion=$(cat "$WORKSPACE/sdk/cwl/setup.py" | grep cwltool== | sed "s/.*==\(.*\)'.*/\1/")
-test_package_presence python-cwltool "$cwltoolversion" python 2
+test_package_presence python-cwltool "$cwltoolversion" python 3
 if [[ "$?" == "0" ]]; then
-  fpm_build cwltool "" "" python $cwltoolversion --iteration 2
+  fpm_build cwltool "" "" python $cwltoolversion --iteration 3 --python-disable-dependency ruamel.yaml "${ruamel_depends[@]}"
 fi
 
 # The PAM module
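
For reference, a minimal Python sketch (not part of this commit) of what the ruamel.yaml workaround above computes, assuming sdk/python/setup.py pins ruamel.yaml the same way sdk/cwl/setup.py does further down in this diff ('ruamel.yaml >=0.15.54, <=0.15.77'); variable names here are illustrative:

    import re

    constraint = "ruamel.yaml >=0.15.54, <=0.15.77"   # assumed constraint string from setup.py
    ruamel_depends = []
    for bound in constraint.replace("ruamel.yaml", "").split(","):
        bound = re.sub(r"([0-9])", r" \1", bound.strip(), count=1)   # ">=0.15.54" -> ">= 0.15.54"
        if ">" in bound:
            ruamel_depends += ["--depends", "python-ruamel.yaml %s-1" % bound]   # debian_revision for the lower bound
        elif "<" in bound:
            ruamel_depends += ["--depends", "python-ruamel.yaml %s-9" % bound]   # debian_revision for the upper bound
        else:
            raise ValueError("unparseable ruamel.yaml dependency: %r" % bound)

    print(ruamel_depends)
    # ['--depends', 'python-ruamel.yaml >= 0.15.54-1', '--depends', 'python-ruamel.yaml <= 0.15.77-9']
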
diff --git a/build/run-library.sh b/build/run-library.sh
index 8ba14949d3c0847acaaa8c2fe3671a513c7668de..b595cc8a06ee1ff8563289e7f197c00bd0fa963e 100755 (executable)
@@ -60,7 +60,7 @@ version_from_git() {
     fi
 
     declare $(format_last_commit_here "git_ts=%ct git_hash=%h")
-    ARVADOS_BUILDING_VERSION="$(git describe --abbrev=0).$(date -ud "@$git_ts" +%Y%m%d%H%M%S)"
+    ARVADOS_BUILDING_VERSION="$(git tag -l |sort -V -r |head -n1).$(date -ud "@$git_ts" +%Y%m%d%H%M%S)"
     echo "$ARVADOS_BUILDING_VERSION"
 }
 
diff --git a/doc/user/cwl/cwl-extensions.html.textile.liquid b/doc/user/cwl/cwl-extensions.html.textile.liquid
index 7abc794e198d6910eb64bbf88da79d218c7e2b25..d62002237a7e7b1d43aa7c59f4ef1afa7bc38b84 100644 (file)
@@ -43,6 +43,7 @@ hints:
   arv:WorkflowRunnerResources:
     ramMin: 2048
     coresMin: 2
+    keep_cache: 512
   arv:ClusterTarget:
     cluster_id: clsr1
     project_uuid: clsr1-j7d0g-qxc4jcji7n4lafx
@@ -137,6 +138,7 @@ table(table table-bordered table-condensed).
 |_. Field |_. Type |_. Description |
 |ramMin|int|RAM, in mebibytes, to reserve for the arvados-cwl-runner process. Default 1 GiB|
 |coresMin|int|Number of cores to reserve to the arvados-cwl-runner process. Default 1 core.|
+|keep_cache|int|Size of collection metadata cache for the workflow runner, in MiB.  Default 256 MiB.  Will be added on to the RAM request when determining node size to request.|
 
 h2(#clustertarget). arv:ClusterTarget
 
diff --git a/docker/jobs/apt.arvados.org.list b/docker/jobs/apt.arvados.org.list
index ae1a0862a67437e257fa1fdec2919473799ac56c..11b98e25bb0f3905ef299c1bc1d1a94f08c4a664 100644 (file)
@@ -1,3 +1,2 @@
 # apt.arvados.org
-deb http://apt.arvados.org/ jessie main
 deb http://apt.arvados.org/ jessie-dev main
diff --git a/sdk/cli/arvados-cli.gemspec b/sdk/cli/arvados-cli.gemspec
index 723b5166865ab6b272dbb885b92f73c009125141..80abc9c497f2da9f0f72ebd022e47ae4fb07a14e 100644 (file)
@@ -7,7 +7,7 @@ if not File.exist?('/usr/bin/git') then
   exit
 end
 
-git_latest_tag = `git describe --abbrev=0`
+git_latest_tag = `git tag -l |sort -V -r |head -n1`
 git_latest_tag = git_latest_tag.encode('utf-8').strip
 git_timestamp, git_hash = `git log -n1 --first-parent --format=%ct:%H .`.chomp.split(":")
 git_timestamp = Time.at(git_timestamp.to_i).utc
diff --git a/sdk/cwl/arvados_cwl/__init__.py b/sdk/cwl/arvados_cwl/__init__.py
index 9b814f534c11af95aa59b77cdb3eaaa737866195..7e149528308fc9c6e38e0021af858da5450b58f8 100644 (file)
@@ -159,6 +159,10 @@ def arg_parser():  # type: () -> argparse.ArgumentParser
                          default=None,
                          metavar="CLUSTER_ID")
 
+    parser.add_argument("--collection-cache-size", type=int,
+                        default=None,
+                        help="Collection cache size (in MiB, default 256).")
+
     parser.add_argument("--name", type=str,
                         help="Name to use for workflow execution instance.",
                         default=None)
@@ -191,7 +195,7 @@ def arg_parser():  # type: () -> argparse.ArgumentParser
                         help=argparse.SUPPRESS)
 
     parser.add_argument("--thread-count", type=int,
-                        default=4, help="Number of threads to use for job submit and output collection.")
+                        default=1, help="Number of threads to use for job submit and output collection.")
 
     parser.add_argument("--http-timeout", type=int,
                         default=5*60, dest="http_timeout", help="API request timeout in seconds. Default is 300 seconds (5 minutes).")
diff --git a/sdk/cwl/arvados_cwl/arv-cwl-schema.yml b/sdk/cwl/arvados_cwl/arv-cwl-schema.yml
index 902b1ffba299240438c60c8a0a866db598b2a101..dce1bd4d0247d2f56af8902f844814633b739b25 100644 (file)
@@ -233,6 +233,13 @@ $graph:
       type: int?
       doc: Minimum cores allocated to cwl-runner
       jsonldPredicate: "https://w3id.org/cwl/cwl#ResourceRequirement/coresMin"
+    keep_cache:
+      type: int?
+      doc: |
+        Size of collection metadata cache for the workflow runner, in
+        MiB.  Default 256 MiB.  Will be added on to the RAM request
+        when determining node size to request.
+      jsonldPredicate: "http://arvados.org/cwl#RuntimeConstraints/keep_cache"
 
 - name: ClusterTarget
   type: record
diff --git a/sdk/cwl/arvados_cwl/arvcontainer.py b/sdk/cwl/arvados_cwl/arvcontainer.py
index f1ae65fc0f97e4b29f69fed0ca1fa3cf9ac78de2..4c49a449b2a68fdf1eaaa5cd674129ae257dfc6e 100644 (file)
@@ -407,7 +407,7 @@ class RunnerContainer(Runner):
             "secret_mounts": secret_mounts,
             "runtime_constraints": {
                 "vcpus": math.ceil(self.submit_runner_cores),
-                "ram": math.ceil(1024*1024 * self.submit_runner_ram),
+                "ram": 1024*1024 * (math.ceil(self.submit_runner_ram) + math.ceil(self.collection_cache_size)),
                 "API": True
             },
             "use_existing": self.enable_reuse,
@@ -441,6 +441,7 @@ class RunnerContainer(Runner):
         # --eval-timeout is the timeout for javascript invocation
         # --parallel-task-count is the number of threads to use for job submission
         # --enable/disable-reuse sets desired job reuse
+        # --collection-cache-size sets aside memory to store collections
         command = ["arvados-cwl-runner",
                    "--local",
                    "--api=containers",
@@ -448,7 +449,8 @@ class RunnerContainer(Runner):
                    "--disable-validate",
                    "--eval-timeout=%s" % self.arvrunner.eval_timeout,
                    "--thread-count=%s" % self.arvrunner.thread_count,
-                   "--enable-reuse" if self.enable_reuse else "--disable-reuse"]
+                   "--enable-reuse" if self.enable_reuse else "--disable-reuse",
+                   "--collection-cache-size=%s" % self.collection_cache_size]
 
         if self.output_name:
             command.append("--output-name=" + self.output_name)
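
The net effect of the runtime_constraints change above: the runner container now requests submit_runner_ram plus collection_cache_size megabytes of RAM. A quick, standalone check of that arithmetic using the defaults in this commit (1024 MiB runner RAM, 256 MiB collection cache):

    import math

    submit_runner_ram = 1024       # MiB, Runner default
    collection_cache_size = 256    # MiB, default --collection-cache-size / keep_cache
    ram = 1024 * 1024 * (math.ceil(submit_runner_ram) + math.ceil(collection_cache_size))
    print(int(ram))   # 1342177280, i.e. (1024+256)*1024*1024 -- the value the updated tests expect
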
diff --git a/sdk/cwl/arvados_cwl/arvworkflow.py b/sdk/cwl/arvados_cwl/arvworkflow.py
index f514476b9099895d3739dbb34ab0a0c372bd7e73..eb78a25fedbd4754752ff8598d7e1faa6b1585db 100644 (file)
@@ -155,7 +155,7 @@ class ArvadosWorkflow(Workflow):
 
     def job(self, joborder, output_callback, runtimeContext):
 
-        builder = self._init_job(joborder, runtimeContext)
+        builder = make_builder(joborder, self.hints, self.requirements, runtimeContext)
         runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)
 
         req, _ = self.get_requirement("http://arvados.org/cwl#RunInSingleContainer")
diff --git a/sdk/cwl/arvados_cwl/context.py b/sdk/cwl/arvados_cwl/context.py
index 7831e1cfd0822abbcac5a77c460a33e8ff492714..8cfe22ad7b6619f1f02d95eaf71153e44e52fd01 100644 (file)
@@ -34,6 +34,7 @@ class ArvRuntimeContext(RuntimeContext):
         self.submit_runner_cluster = None
         self.cluster_target_id = 0
         self.always_submit_runner = False
+        self.collection_cache_size = 256
 
         super(ArvRuntimeContext, self).__init__(kwargs)
 
diff --git a/sdk/cwl/arvados_cwl/executor.py b/sdk/cwl/arvados_cwl/executor.py
index 6cac709260fcef6f464d5d7e54706305e883a685..9595b55915477b6cb8beb858e8cc8e4b89f3d603 100644 (file)
@@ -29,7 +29,7 @@ from .arvjob import RunnerJob, RunnerTemplate
 from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps
 from .arvtool import ArvadosCommandTool, validate_cluster_target
 from .arvworkflow import ArvadosWorkflow, upload_workflow
-from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver, CollectionCache
+from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver, CollectionCache, pdh_size
 from .perf import Perf
 from .pathmapper import NoFollowPathMapper
 from .task_queue import TaskQueue
@@ -37,7 +37,7 @@ from .context import ArvLoadingContext, ArvRuntimeContext
 from ._version import __version__
 
 from cwltool.process import shortname, UnsupportedRequirement, use_custom_schema
-from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, get_listing
+from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, get_listing, visit_class
 from cwltool.command_line_tool import compute_checksums
 
 logger = logging.getLogger('arvados.cwl-runner')
@@ -95,6 +95,7 @@ class ArvCwlExecutor(object):
             arvargs.output_name = None
             arvargs.output_tags = None
             arvargs.thread_count = 1
+            arvargs.collection_cache_size = None
 
         self.api = api_client
         self.processes = {}
@@ -116,13 +117,21 @@ class ArvCwlExecutor(object):
         self.thread_count = arvargs.thread_count
         self.poll_interval = 12
         self.loadingContext = None
+        self.should_estimate_cache_size = True
 
         if keep_client is not None:
             self.keep_client = keep_client
         else:
             self.keep_client = arvados.keep.KeepClient(api_client=self.api, num_retries=self.num_retries)
 
-        self.collection_cache = CollectionCache(self.api, self.keep_client, self.num_retries)
+        if arvargs.collection_cache_size:
+            collection_cache_size = arvargs.collection_cache_size*1024*1024
+            self.should_estimate_cache_size = False
+        else:
+            collection_cache_size = 256*1024*1024
+
+        self.collection_cache = CollectionCache(self.api, self.keep_client, self.num_retries,
+                                                cap=collection_cache_size)
 
         self.fetcher_constructor = partial(CollectionFetcher,
                                            api_client=self.api,
@@ -206,7 +215,8 @@ http://doc.arvados.org/install/install-api-server.html#disable_api_methods
 
 
     def start_run(self, runnable, runtimeContext):
-        self.task_queue.add(partial(runnable.run, runtimeContext))
+        self.task_queue.add(partial(runnable.run, runtimeContext),
+                            self.workflow_eval_lock, self.stop_polling)
 
     def process_submitted(self, container):
         with self.workflow_eval_lock:
@@ -216,7 +226,8 @@ http://doc.arvados.org/install/install-api-server.html#disable_api_methods
         with self.workflow_eval_lock:
             j = self.processes[uuid]
             logger.info("%s %s is %s", self.label(j), uuid, record["state"])
-            self.task_queue.add(partial(j.done, record))
+            self.task_queue.add(partial(j.done, record),
+                                self.workflow_eval_lock, self.stop_polling)
             del self.processes[uuid]
 
     def runtime_status_update(self, kind, message, detail=None):
@@ -584,6 +595,21 @@ http://doc.arvados.org/install/install-api-server.html#disable_api_methods
         if runtimeContext.priority < 1 or runtimeContext.priority > 1000:
             raise Exception("--priority must be in the range 1..1000.")
 
+        if self.should_estimate_cache_size:
+            visited = set()
+            estimated_size = [0]
+            def estimate_collection_cache(obj):
+                if obj.get("location", "").startswith("keep:"):
+                    m = pdh_size.match(obj["location"][5:])
+                    if m and m.group(1) not in visited:
+                        visited.add(m.group(1))
+                        estimated_size[0] += int(m.group(2))
+            visit_class(job_order, ("File", "Directory"), estimate_collection_cache)
+            runtimeContext.collection_cache_size = max(((estimated_size[0]*192) / (1024*1024))+1, 256)
+            self.collection_cache.set_cap(runtimeContext.collection_cache_size*1024*1024)
+
+        logger.info("Using collection cache size %s MiB", runtimeContext.collection_cache_size)
+
         runnerjob = None
         if runtimeContext.submit:
             # Submit a runner job to run the workflow for us.
@@ -604,7 +630,9 @@ http://doc.arvados.org/install/install-api-server.html#disable_api_methods
                                                 intermediate_output_ttl=runtimeContext.intermediate_output_ttl,
                                                 merged_map=merged_map,
                                                 priority=runtimeContext.priority,
-                                                secret_store=self.secret_store)
+                                                secret_store=self.secret_store,
+                                                collection_cache_size=runtimeContext.collection_cache_size,
+                                                collection_cache_is_default=self.should_estimate_cache_size)
             elif self.work_api == "jobs":
                 runnerjob = RunnerJob(self, tool, job_order, runtimeContext.enable_reuse,
                                       self.output_name,
@@ -676,6 +704,10 @@ http://doc.arvados.org/install/install-api-server.html#disable_api_methods
                     else:
                         logger.error("Workflow is deadlocked, no runnable processes and not waiting on any pending processes.")
                         break
+
+                if self.stop_polling.is_set():
+                    break
+
                 loopperf.__enter__()
             loopperf.__exit__()
 
diff --git a/sdk/cwl/arvados_cwl/fsaccess.py b/sdk/cwl/arvados_cwl/fsaccess.py
index 5981268128486496f22d28b4928d0b90b5775e7e..0816ee8fc05b74198ae9abad69887905bf8113ee 100644 (file)
@@ -28,6 +28,8 @@ from schema_salad.ref_resolver import DefaultFetcher
 
 logger = logging.getLogger('arvados.cwl-runner')
 
+pdh_size = re.compile(r'([0-9a-f]{32})\+(\d+)(\+\S+)*')
+
 class CollectionCache(object):
     def __init__(self, api_client, keep_client, num_retries,
                  cap=256*1024*1024,
@@ -41,20 +43,26 @@ class CollectionCache(object):
         self.cap = cap
         self.min_entries = min_entries
 
-    def cap_cache(self):
-        if self.total > self.cap:
-            # ordered list iterates from oldest to newest
-            for pdh, v in self.collections.items():
-                if self.total < self.cap or len(self.collections) < self.min_entries:
-                    break
-                # cut it loose
-                logger.debug("Evicting collection reader %s from cache", pdh)
-                del self.collections[pdh]
-                self.total -= v[1]
+    def set_cap(self, cap):
+        self.cap = cap
+
+    def cap_cache(self, required):
+        # ordered dict iterates from oldest to newest
+        for pdh, v in self.collections.items():
+            available = self.cap - self.total
+            if available >= required or len(self.collections) < self.min_entries:
+                return
+            # cut it loose
+            logger.debug("Evicting collection reader %s from cache (cap %s total %s required %s)", pdh, self.cap, self.total, required)
+            del self.collections[pdh]
+            self.total -= v[1]
 
     def get(self, pdh):
         with self.lock:
             if pdh not in self.collections:
+                m = pdh_size.match(pdh)
+                if m:
+                    self.cap_cache(int(m.group(2)) * 128)
                 logger.debug("Creating collection reader for %s", pdh)
                 cr = arvados.collection.CollectionReader(pdh, api_client=self.api_client,
                                                          keep_client=self.keep_client,
@@ -62,7 +70,6 @@ class CollectionCache(object):
                 sz = len(cr.manifest_text()) * 128
                 self.collections[pdh] = (cr, sz)
                 self.total += sz
-                self.cap_cache()
             else:
                 cr, sz = self.collections[pdh]
                 # bump it to the back
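
A minimal standalone sketch of the reworked eviction (assuming, as in CollectionCache above, an ordered dict of pdh -> (reader, size) and a min_entries floor): space for a known incoming size is now freed before the reader is created, instead of trimming the cache after insertion.

    from collections import OrderedDict

    def cap_cache(collections, total, cap, required, min_entries=2):
        # Evict oldest readers until `required` bytes fit under `cap`,
        # but never drop below min_entries cached readers.
        for pdh in list(collections):
            if cap - total >= required or len(collections) < min_entries:
                break
            _reader, sz = collections.pop(pdh)
            total -= sz
        return total

    cache = OrderedDict((pdh, (object(), 100)) for pdh in ("a", "b", "c"))
    print(cap_cache(cache, total=300, cap=350, required=120))   # 200 -- "a" evicted, "b" and "c" remain
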
diff --git a/sdk/cwl/arvados_cwl/runner.py b/sdk/cwl/arvados_cwl/runner.py
index a846f2b0016931dcd6a938c47f9572083963d2bc..6094cfe245872b1b58976901668bd80a8b5b91b0 100644 (file)
@@ -364,7 +364,9 @@ class Runner(object):
                  output_name, output_tags, submit_runner_ram=0,
                  name=None, on_error=None, submit_runner_image=None,
                  intermediate_output_ttl=0, merged_map=None,
-                 priority=None, secret_store=None):
+                 priority=None, secret_store=None,
+                 collection_cache_size=256,
+                 collection_cache_is_default=True):
         self.arvrunner = runner
         self.tool = tool
         self.job_order = job_order
@@ -389,6 +391,7 @@ class Runner(object):
 
         self.submit_runner_cores = 1
         self.submit_runner_ram = 1024  # defaut 1 GiB
+        self.collection_cache_size = collection_cache_size
 
         runner_resource_req, _ = self.tool.get_requirement("http://arvados.org/cwl#WorkflowRunnerResources")
         if runner_resource_req:
@@ -396,6 +399,8 @@ class Runner(object):
                 self.submit_runner_cores = runner_resource_req["coresMin"]
             if runner_resource_req.get("ramMin"):
                 self.submit_runner_ram = runner_resource_req["ramMin"]
+            if runner_resource_req.get("keep_cache") and collection_cache_is_default:
+                self.collection_cache_size = runner_resource_req["keep_cache"]
 
         if submit_runner_ram:
             # Command line / initializer overrides default and/or spec from workflow
diff --git a/sdk/cwl/arvados_cwl/task_queue.py b/sdk/cwl/arvados_cwl/task_queue.py
index b9fd09807b452c1b06738ef1a7df72fd9dcc8708..1c233fac0ad98f4b0421a4e0856b00fd19d1422f 100644 (file)
@@ -11,7 +11,7 @@ logger = logging.getLogger('arvados.cwl-runner')
 class TaskQueue(object):
     def __init__(self, lock, thread_count):
         self.thread_count = thread_count
-        self.task_queue = Queue.Queue()
+        self.task_queue = Queue.Queue(maxsize=self.thread_count)
         self.task_queue_threads = []
         self.lock = lock
         self.in_flight = 0
@@ -23,27 +23,39 @@ class TaskQueue(object):
             t.start()
 
     def task_queue_func(self):
+        while True:
+            task = self.task_queue.get()
+            if task is None:
+                return
+            try:
+                task()
+            except Exception as e:
+                logger.exception("Unhandled exception running task")
+                self.error = e
 
-            while True:
-                task = self.task_queue.get()
-                if task is None:
-                    return
-                try:
-                    task()
-                except Exception as e:
-                    logger.exception("Unhandled exception running task")
-                    self.error = e
-
-                with self.lock:
-                    self.in_flight -= 1
-
-    def add(self, task):
-        with self.lock:
-            if self.thread_count > 1:
+            with self.lock:
+                self.in_flight -= 1
+
+    def add(self, task, unlock, check_done):
+        if self.thread_count > 1:
+            with self.lock:
                 self.in_flight += 1
-                self.task_queue.put(task)
-            else:
-                task()
+        else:
+            task()
+            return
+
+        while True:
+            try:
+                unlock.release()
+                if check_done.is_set():
+                    return
+                self.task_queue.put(task, block=True, timeout=3)
+                return
+            except Queue.Full:
+                pass
+            finally:
+                unlock.acquire()
+
 
     def drain(self):
         try:
diff --git a/sdk/cwl/arvados_version.py b/sdk/cwl/arvados_version.py
index 88cf1ed7caa1da04fd5a1794c616cd5a0f2039b3..d13dd5ec538e678268d7b79836d745ba89d46047 100644 (file)
@@ -10,9 +10,9 @@ import re
 SETUP_DIR = os.path.dirname(__file__) or '.'
 
 def git_latest_tag():
-    gitinfo = subprocess.check_output(
-        ['git', 'describe', '--abbrev=0']).strip()
-    return str(gitinfo.decode('utf-8'))
+    gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+    gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+    return str(next(iter(gittags)).decode('utf-8'))
 
 def choose_version_from():
     sdk_ts = subprocess.check_output(
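
These version helpers (and the matching gemspec and run-library.sh changes elsewhere in this commit) switch from `git describe --abbrev=0` to listing all tags and sorting them numerically, so the highest version tag wins rather than the nearest reachable one. A quick sketch of the sort key with made-up tags:

    tags = [b"1.2.9", b"1.2.10", b"1.1.4"]
    tags.sort(key=lambda s: [int(u) for u in s.split(b".")], reverse=True)
    print(tags[0].decode("utf-8"))   # 1.2.10 -- a plain lexicographic sort would have put 1.2.9 first
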
diff --git a/sdk/cwl/gittaggers.py b/sdk/cwl/gittaggers.py
index 8ccb6645de8c78ccf77d3e049fa1b1e6257e5c91..4dc8448476123934dae7193fe680141671a2b7ec 100644 (file)
@@ -29,9 +29,9 @@ class EggInfoFromGit(egg_info):
     from source package), leave it alone.
     """
     def git_latest_tag(self):
-        gitinfo = subprocess.check_output(
-            ['git', 'describe', '--abbrev=0']).strip()
-        return str(gitinfo.decode('utf-8'))
+        gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+        gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+        return str(next(iter(gittags)).decode('utf-8'))
 
     def git_timestamp_tag(self):
         gitinfo = subprocess.check_output(
diff --git a/sdk/cwl/setup.py b/sdk/cwl/setup.py
index 5d373282b6ca8ba3129b28c78e1bbe8934fda005..9d25a562ab32d09dcdfba627fc2089260879cce1 100644 (file)
@@ -37,7 +37,7 @@ setup(name='arvados-cwl-runner',
           'schema-salad==2.7.20181116024232',
           'typing >= 3.6.4',
           'ruamel.yaml >=0.15.54, <=0.15.77',
-          'arvados-python-client>=1.1.4.20180607143841',
+          'arvados-python-client>=1.2.1.20181130020805',
           'setuptools',
           'ciso8601 >=1.0.6, <2.0.0',
           'subprocess32>=3.5.1',
diff --git a/sdk/cwl/tests/test_fsaccess.py b/sdk/cwl/tests/test_fsaccess.py
index d52e948710188dfb16e5ce175f5eb317138c7449..f83612a8b01186d822eb00728a76d31569408ced 100644 (file)
@@ -36,34 +36,34 @@ class TestFsAccess(unittest.TestCase):
         cache = CollectionCache(mock.MagicMock(), mock.MagicMock(), 4)
         cr().manifest_text.return_value = 'x' * 524289
         self.assertEqual(0, cache.total)
-        c1 = cache.get("99999999999999999999999999999991+99")
-        self.assertIn("99999999999999999999999999999991+99", cache.collections)
-        self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+        c1 = cache.get("99999999999999999999999999999991+524289")
+        self.assertIn("99999999999999999999999999999991+524289", cache.collections)
+        self.assertNotIn("99999999999999999999999999999992+524289", cache.collections)
         self.assertEqual((524289*128)*1, cache.total)
 
-        c2 = cache.get("99999999999999999999999999999992+99")
-        self.assertIn("99999999999999999999999999999991+99", cache.collections)
-        self.assertIn("99999999999999999999999999999992+99", cache.collections)
+        c2 = cache.get("99999999999999999999999999999992+524289")
+        self.assertIn("99999999999999999999999999999991+524289", cache.collections)
+        self.assertIn("99999999999999999999999999999992+524289", cache.collections)
         self.assertEqual((524289*128)*2, cache.total)
 
-        c1 = cache.get("99999999999999999999999999999991+99")
-        self.assertIn("99999999999999999999999999999991+99", cache.collections)
-        self.assertIn("99999999999999999999999999999992+99", cache.collections)
+        c1 = cache.get("99999999999999999999999999999991+524289")
+        self.assertIn("99999999999999999999999999999991+524289", cache.collections)
+        self.assertIn("99999999999999999999999999999992+524289", cache.collections)
         self.assertEqual((524289*128)*2, cache.total)
 
-        c3 = cache.get("99999999999999999999999999999993+99")
-        self.assertIn("99999999999999999999999999999991+99", cache.collections)
-        self.assertIn("99999999999999999999999999999992+99", cache.collections)
+        c3 = cache.get("99999999999999999999999999999993+524289")
+        self.assertIn("99999999999999999999999999999991+524289", cache.collections)
+        self.assertIn("99999999999999999999999999999992+524289", cache.collections)
         self.assertEqual((524289*128)*3, cache.total)
 
-        c4 = cache.get("99999999999999999999999999999994+99")
-        self.assertIn("99999999999999999999999999999991+99", cache.collections)
-        self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+        c4 = cache.get("99999999999999999999999999999994+524289")
+        self.assertIn("99999999999999999999999999999991+524289", cache.collections)
+        self.assertNotIn("99999999999999999999999999999992+524289", cache.collections)
         self.assertEqual((524289*128)*3, cache.total)
 
-        c5 = cache.get("99999999999999999999999999999995+99")
-        self.assertNotIn("99999999999999999999999999999991+99", cache.collections)
-        self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+        c5 = cache.get("99999999999999999999999999999995+524289")
+        self.assertNotIn("99999999999999999999999999999991+524289", cache.collections)
+        self.assertNotIn("99999999999999999999999999999992+524289", cache.collections)
         self.assertEqual((524289*128)*3, cache.total)
 
 
@@ -72,37 +72,37 @@ class TestFsAccess(unittest.TestCase):
         cache = CollectionCache(mock.MagicMock(), mock.MagicMock(), 4)
         cr().manifest_text.return_value = 'x' * 524287
         self.assertEqual(0, cache.total)
-        c1 = cache.get("99999999999999999999999999999991+99")
-        self.assertIn("99999999999999999999999999999991+99", cache.collections)
-        self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+        c1 = cache.get("99999999999999999999999999999991+524287")
+        self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+        self.assertNotIn("99999999999999999999999999999992+524287", cache.collections)
         self.assertEqual((524287*128)*1, cache.total)
 
-        c2 = cache.get("99999999999999999999999999999992+99")
-        self.assertIn("99999999999999999999999999999991+99", cache.collections)
-        self.assertIn("99999999999999999999999999999992+99", cache.collections)
+        c2 = cache.get("99999999999999999999999999999992+524287")
+        self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+        self.assertIn("99999999999999999999999999999992+524287", cache.collections)
         self.assertEqual((524287*128)*2, cache.total)
 
-        c1 = cache.get("99999999999999999999999999999991+99")
-        self.assertIn("99999999999999999999999999999991+99", cache.collections)
-        self.assertIn("99999999999999999999999999999992+99", cache.collections)
+        c1 = cache.get("99999999999999999999999999999991+524287")
+        self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+        self.assertIn("99999999999999999999999999999992+524287", cache.collections)
         self.assertEqual((524287*128)*2, cache.total)
 
-        c3 = cache.get("99999999999999999999999999999993+99")
-        self.assertIn("99999999999999999999999999999991+99", cache.collections)
-        self.assertIn("99999999999999999999999999999992+99", cache.collections)
+        c3 = cache.get("99999999999999999999999999999993+524287")
+        self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+        self.assertIn("99999999999999999999999999999992+524287", cache.collections)
         self.assertEqual((524287*128)*3, cache.total)
 
-        c4 = cache.get("99999999999999999999999999999994+99")
-        self.assertIn("99999999999999999999999999999991+99", cache.collections)
-        self.assertIn("99999999999999999999999999999992+99", cache.collections)
+        c4 = cache.get("99999999999999999999999999999994+524287")
+        self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+        self.assertIn("99999999999999999999999999999992+524287", cache.collections)
         self.assertEqual((524287*128)*4, cache.total)
 
-        c5 = cache.get("99999999999999999999999999999995+99")
-        self.assertIn("99999999999999999999999999999991+99", cache.collections)
-        self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+        c5 = cache.get("99999999999999999999999999999995+524287")
+        self.assertIn("99999999999999999999999999999991+524287", cache.collections)
+        self.assertNotIn("99999999999999999999999999999992+524287", cache.collections)
         self.assertEqual((524287*128)*4, cache.total)
 
-        c6 = cache.get("99999999999999999999999999999996+99")
-        self.assertNotIn("99999999999999999999999999999991+99", cache.collections)
-        self.assertNotIn("99999999999999999999999999999992+99", cache.collections)
+        c6 = cache.get("99999999999999999999999999999996+524287")
+        self.assertNotIn("99999999999999999999999999999991+524287", cache.collections)
+        self.assertNotIn("99999999999999999999999999999992+524287", cache.collections)
         self.assertEqual((524287*128)*4, cache.total)
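
The two tests differ only in manifest length, which is enough to change how many readers fit now that get() reserves room up front; with the default 256 MiB cap:

    cap = 256 * 1024 * 1024                # default CollectionCache cap, in bytes
    print(4 * 524289 * 128 > cap)          # True  -> the first test can only keep 3 readers cached
    print(4 * 524287 * 128 <= cap)         # True  -> the second test keeps 4
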
diff --git a/sdk/cwl/tests/test_submit.py b/sdk/cwl/tests/test_submit.py
index 1b892a9836f209857995fbf4e94002e015dcd1cf..55164446bdc54a5b81f3ca8d27284fc351e338c3 100644 (file)
@@ -273,8 +273,8 @@ def stubs(func):
             'state': 'Committed',
             'command': ['arvados-cwl-runner', '--local', '--api=containers',
                         '--no-log-timestamps', '--disable-validate',
-                        '--eval-timeout=20', '--thread-count=4',
-                        '--enable-reuse', '--debug', '--on-error=continue',
+                        '--eval-timeout=20', '--thread-count=1',
+                        '--enable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
                         '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json'],
             'name': 'submit_wf.cwl',
             'container_image': '999999999999999999999999999999d3+99',
@@ -283,7 +283,7 @@ def stubs(func):
             'runtime_constraints': {
                 'API': True,
                 'vcpus': 1,
-                'ram': 1024*1024*1024
+                'ram': (1024+256)*1024*1024
             },
             'use_existing': True,
             'properties': {},
@@ -558,8 +558,9 @@ class TestSubmit(unittest.TestCase):
         expect_container["command"] = [
             'arvados-cwl-runner', '--local', '--api=containers',
             '--no-log-timestamps', '--disable-validate',
-            '--eval-timeout=20', '--thread-count=4',
-            '--disable-reuse', '--debug', '--on-error=continue',
+            '--eval-timeout=20', '--thread-count=1',
+            '--disable-reuse', "--collection-cache-size=256",
+            '--debug', '--on-error=continue',
             '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
         expect_container["use_existing"] = False
 
@@ -583,8 +584,8 @@ class TestSubmit(unittest.TestCase):
         expect_container["command"] = [
             'arvados-cwl-runner', '--local', '--api=containers',
             '--no-log-timestamps', '--disable-validate',
-            '--eval-timeout=20', '--thread-count=4',
-            '--disable-reuse', '--debug', '--on-error=continue',
+            '--eval-timeout=20', '--thread-count=1',
+            '--disable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
             '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
         expect_container["use_existing"] = False
         expect_container["name"] = "submit_wf_no_reuse.cwl"
@@ -620,8 +621,9 @@ class TestSubmit(unittest.TestCase):
         expect_container = copy.deepcopy(stubs.expect_container_spec)
         expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
                                        '--no-log-timestamps', '--disable-validate',
-                                       '--eval-timeout=20', '--thread-count=4',
-                                       '--enable-reuse', '--debug', '--on-error=stop',
+                                       '--eval-timeout=20', '--thread-count=1',
+                                       '--enable-reuse', "--collection-cache-size=256",
+                                       '--debug', '--on-error=stop',
                                        '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
 
         stubs.api.container_requests().create.assert_called_with(
@@ -646,8 +648,8 @@ class TestSubmit(unittest.TestCase):
         expect_container = copy.deepcopy(stubs.expect_container_spec)
         expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
                                        '--no-log-timestamps', '--disable-validate',
-                                       '--eval-timeout=20', '--thread-count=4',
-                                       '--enable-reuse',
+                                       '--eval-timeout=20', '--thread-count=1',
+                                       '--enable-reuse', "--collection-cache-size=256",
                                        "--output-name="+output_name, '--debug', '--on-error=continue',
                                        '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
         expect_container["output_name"] = output_name
@@ -672,8 +674,8 @@ class TestSubmit(unittest.TestCase):
         expect_container = copy.deepcopy(stubs.expect_container_spec)
         expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
                                        '--no-log-timestamps', '--disable-validate',
-                                       '--eval-timeout=20', '--thread-count=4',
-                                       '--enable-reuse', "--debug",
+                                       '--eval-timeout=20', '--thread-count=1',
+                                       '--enable-reuse', "--collection-cache-size=256", "--debug",
                                        "--storage-classes=foo", '--on-error=continue',
                                        '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
 
@@ -739,8 +741,9 @@ class TestSubmit(unittest.TestCase):
         expect_container = copy.deepcopy(stubs.expect_container_spec)
         expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
                                        '--no-log-timestamps', '--disable-validate',
-                                       '--eval-timeout=20', '--thread-count=4',
-                                       '--enable-reuse', '--debug', '--on-error=continue',
+                                       '--eval-timeout=20', '--thread-count=1',
+                                       '--enable-reuse', "--collection-cache-size=256", '--debug',
+                                       '--on-error=continue',
                                        "--intermediate-output-ttl=3600",
                                        '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
 
@@ -764,8 +767,9 @@ class TestSubmit(unittest.TestCase):
         expect_container = copy.deepcopy(stubs.expect_container_spec)
         expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
                                        '--no-log-timestamps', '--disable-validate',
-                                       '--eval-timeout=20', '--thread-count=4',
-                                       '--enable-reuse', '--debug', '--on-error=continue',
+                                       '--eval-timeout=20', '--thread-count=1',
+                                       '--enable-reuse', "--collection-cache-size=256",
+                                       '--debug', '--on-error=continue',
                                        "--trash-intermediate",
                                        '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
 
@@ -791,8 +795,8 @@ class TestSubmit(unittest.TestCase):
         expect_container = copy.deepcopy(stubs.expect_container_spec)
         expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
                                        '--no-log-timestamps', '--disable-validate',
-                                       '--eval-timeout=20', '--thread-count=4',
-                                       '--enable-reuse',
+                                       '--eval-timeout=20', '--thread-count=1',
+                                       '--enable-reuse', "--collection-cache-size=256",
                                        "--output-tags="+output_tags, '--debug', '--on-error=continue',
                                        '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
 
@@ -814,7 +818,7 @@ class TestSubmit(unittest.TestCase):
             logging.exception("")
 
         expect_container = copy.deepcopy(stubs.expect_container_spec)
-        expect_container["runtime_constraints"]["ram"] = 2048*1024*1024
+        expect_container["runtime_constraints"]["ram"] = (2048+256)*1024*1024
 
         stubs.api.container_requests().create.assert_called_with(
             body=JsonDiffMatcher(expect_container))
@@ -876,14 +880,14 @@ class TestSubmit(unittest.TestCase):
             'container_image': '999999999999999999999999999999d3+99',
             'command': ['arvados-cwl-runner', '--local', '--api=containers',
                         '--no-log-timestamps', '--disable-validate',
-                        '--eval-timeout=20', '--thread-count=4',
-                        '--enable-reuse', '--debug', '--on-error=continue',
+                        '--eval-timeout=20', '--thread-count=1',
+                        '--enable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
                         '/var/lib/cwl/workflow/expect_arvworkflow.cwl#main', '/var/lib/cwl/cwl.input.json'],
             'cwd': '/var/spool/cwl',
             'runtime_constraints': {
                 'API': True,
                 'vcpus': 1,
-                'ram': 1073741824
+                'ram': 1342177280
             },
             'use_existing': True,
             'properties': {},
@@ -998,14 +1002,14 @@ class TestSubmit(unittest.TestCase):
             'container_image': "999999999999999999999999999999d3+99",
             'command': ['arvados-cwl-runner', '--local', '--api=containers',
                         '--no-log-timestamps', '--disable-validate',
-                        '--eval-timeout=20', '--thread-count=4',
-                        '--enable-reuse', '--debug', '--on-error=continue',
+                        '--eval-timeout=20', '--thread-count=1',
+                        '--enable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
                         '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json'],
             'cwd': '/var/spool/cwl',
             'runtime_constraints': {
                 'API': True,
                 'vcpus': 1,
-                'ram': 1073741824
+                'ram': 1342177280
             },
             'use_existing': True,
             'properties': {
@@ -1058,8 +1062,9 @@ class TestSubmit(unittest.TestCase):
         expect_container["owner_uuid"] = project_uuid
         expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
                                        '--no-log-timestamps', '--disable-validate',
-                                       "--eval-timeout=20", "--thread-count=4",
-                                       '--enable-reuse', '--debug', '--on-error=continue',
+                                       "--eval-timeout=20", "--thread-count=1",
+                                       '--enable-reuse', "--collection-cache-size=256", '--debug',
+                                       '--on-error=continue',
                                        '--project-uuid='+project_uuid,
                                        '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
 
@@ -1084,8 +1089,9 @@ class TestSubmit(unittest.TestCase):
         expect_container = copy.deepcopy(stubs.expect_container_spec)
         expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
                                        '--no-log-timestamps', '--disable-validate',
-                                       '--eval-timeout=60.0', '--thread-count=4',
-                                       '--enable-reuse', '--debug', '--on-error=continue',
+                                       '--eval-timeout=60.0', '--thread-count=1',
+                                       '--enable-reuse', "--collection-cache-size=256",
+                                       '--debug', '--on-error=continue',
                                        '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
 
         stubs.api.container_requests().create.assert_called_with(
@@ -1093,6 +1099,33 @@ class TestSubmit(unittest.TestCase):
         self.assertEqual(capture_stdout.getvalue(),
                          stubs.expect_container_request_uuid + '\n')
 
+    @stubs
+    def test_submit_container_collection_cache(self, stubs):
+        project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
+        capture_stdout = cStringIO.StringIO()
+        try:
+            exited = arvados_cwl.main(
+                ["--submit", "--no-wait", "--api=containers", "--debug", "--collection-cache-size=500",
+                 "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+                capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+            self.assertEqual(exited, 0)
+        except:
+            logging.exception("")
+
+        expect_container = copy.deepcopy(stubs.expect_container_spec)
+        expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
+                                       '--no-log-timestamps', '--disable-validate',
+                                       '--eval-timeout=20', '--thread-count=1',
+                                       '--enable-reuse', "--collection-cache-size=500",
+                                       '--debug', '--on-error=continue',
+                                       '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
+        expect_container["runtime_constraints"]["ram"] = (1024+500)*1024*1024
+
+        stubs.api.container_requests().create.assert_called_with(
+            body=JsonDiffMatcher(expect_container))
+        self.assertEqual(capture_stdout.getvalue(),
+                         stubs.expect_container_request_uuid + '\n')
+
 
     @stubs
     def test_submit_container_thread_count(self, stubs):
@@ -1111,7 +1144,8 @@ class TestSubmit(unittest.TestCase):
         expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
                                        '--no-log-timestamps', '--disable-validate',
                                        '--eval-timeout=20', '--thread-count=20',
-                                       '--enable-reuse', '--debug', '--on-error=continue',
+                                       '--enable-reuse', "--collection-cache-size=256",
+                                       '--debug', '--on-error=continue',
                                        '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
 
         stubs.api.container_requests().create.assert_called_with(
@@ -1197,19 +1231,25 @@ class TestSubmit(unittest.TestCase):
         expect_container["runtime_constraints"] = {
             "API": True,
             "vcpus": 2,
-            "ram": 2000 * 2**20
+            "ram": (2000+512) * 2**20
         }
         expect_container["name"] = "submit_wf_runner_resources.cwl"
         expect_container["mounts"]["/var/lib/cwl/workflow.json"]["content"]["$graph"][1]["hints"] = [
             {
                 "class": "http://arvados.org/cwl#WorkflowRunnerResources",
                 "coresMin": 2,
-                "ramMin": 2000
+                "ramMin": 2000,
+                "keep_cache": 512
             }
         ]
         expect_container["mounts"]["/var/lib/cwl/workflow.json"]["content"]["$graph"][0]["$namespaces"] = {
             "arv": "http://arvados.org/cwl#",
         }
+        expect_container['command'] = ['arvados-cwl-runner', '--local', '--api=containers',
+                        '--no-log-timestamps', '--disable-validate',
+                        '--eval-timeout=20', '--thread-count=1',
+                        '--enable-reuse', "--collection-cache-size=512", '--debug', '--on-error=continue',
+                        '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
 
         stubs.api.container_requests().create.assert_called_with(
             body=JsonDiffMatcher(expect_container))
@@ -1277,8 +1317,9 @@ class TestSubmit(unittest.TestCase):
                 "--no-log-timestamps",
                 "--disable-validate",
                 "--eval-timeout=20",
-                '--thread-count=4',
+                '--thread-count=1',
                 "--enable-reuse",
+                "--collection-cache-size=256",
                 '--debug',
                 "--on-error=continue",
                 "/var/lib/cwl/workflow.json#main",
@@ -1406,7 +1447,7 @@ class TestSubmit(unittest.TestCase):
             "properties": {},
             "runtime_constraints": {
                 "API": True,
-                "ram": 1073741824,
+                "ram": 1342177280,
                 "vcpus": 1
             },
             "secret_mounts": {
diff --git a/sdk/cwl/tests/test_tq.py b/sdk/cwl/tests/test_tq.py
index 2afbe0cff25f3d26e63253e697f3238468680e0f..a094890650e1a3049f177e9f01ec2330df7c7451 100644 (file)
@@ -22,29 +22,37 @@ def fail_task():
 class TestTaskQueue(unittest.TestCase):
     def test_tq(self):
         tq = TaskQueue(threading.Lock(), 2)
+        try:
+            self.assertIsNone(tq.error)
 
-        self.assertIsNone(tq.error)
-
-        tq.add(success_task)
-        tq.add(success_task)
-        tq.add(success_task)
-        tq.add(success_task)
+            unlock = threading.Lock()
+            unlock.acquire()
+            check_done = threading.Event()
 
-        tq.join()
+            tq.add(success_task, unlock, check_done)
+            tq.add(success_task, unlock, check_done)
+            tq.add(success_task, unlock, check_done)
+            tq.add(success_task, unlock, check_done)
+        finally:
+            tq.join()
 
         self.assertIsNone(tq.error)
 
 
     def test_tq_error(self):
         tq = TaskQueue(threading.Lock(), 2)
-
-        self.assertIsNone(tq.error)
-
-        tq.add(success_task)
-        tq.add(success_task)
-        tq.add(fail_task)
-        tq.add(success_task)
-
-        tq.join()
+        try:
+            self.assertIsNone(tq.error)
+
+            unlock = threading.Lock()
+            unlock.acquire()
+            check_done = threading.Event()
+
+            tq.add(success_task, unlock, check_done)
+            tq.add(success_task, unlock, check_done)
+            tq.add(fail_task, unlock, check_done)
+            tq.add(success_task, unlock, check_done)
+        finally:
+            tq.join()
 
         self.assertIsNotNone(tq.error)
diff --git a/sdk/cwl/tests/wf/submit_wf_runner_resources.cwl b/sdk/cwl/tests/wf/submit_wf_runner_resources.cwl
index 9e2712194950627d87c148b76fae14d00f5fac2b..814cd07ab5d0833a5a374e503b6ee1feae00ef87 100644 (file)
@@ -15,6 +15,7 @@ hints:
   arv:WorkflowRunnerResources:
     ramMin: 2000
     coresMin: 2
+    keep_cache: 512
 inputs:
   - id: x
     type: File
diff --git a/sdk/pam/arvados_version.py b/sdk/pam/arvados_version.py
index a24d53dad6a629f9d08692bb19dd62e144655a7b..2e6484cabdf1e71d39f5fe21139b29c2ce09ad93 100644 (file)
@@ -8,9 +8,9 @@ import os
 import re
 
 def git_latest_tag():
-    gitinfo = subprocess.check_output(
-        ['git', 'describe', '--abbrev=0']).strip()
-    return str(gitinfo.decode('utf-8'))
+    gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+    gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+    return str(next(iter(gittags)).decode('utf-8'))
 
 def git_timestamp_tag():
     gitinfo = subprocess.check_output(
index a24d53dad6a629f9d08692bb19dd62e144655a7b..2e6484cabdf1e71d39f5fe21139b29c2ce09ad93 100644 (file)
@@ -8,9 +8,9 @@ import os
 import re
 
 def git_latest_tag():
-    gitinfo = subprocess.check_output(
-        ['git', 'describe', '--abbrev=0']).strip()
-    return str(gitinfo.decode('utf-8'))
+    gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+    gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+    return str(next(iter(gittags)).decode('utf-8'))
 
 def git_timestamp_tag():
     gitinfo = subprocess.check_output(
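
[Editor's note] The replacement picks the latest tag by listing all tags and sorting them as dotted integer versions, instead of relying on git describe. A small standalone check of that sort key (the tag list below is made up for illustration):

    # Illustrative tag list; shows numeric rather than lexicographic ordering.
    tags = [b'1.2.9', b'1.10.0', b'1.2.10']
    tags.sort(key=lambda s: [int(u) for u in s.split(b'.')], reverse=True)
    print(tags[0].decode('utf-8'))      # -> 1.10.0
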
index ccf25c422e62085e1edf3829459f5cdb8a8710ff..f3278fcc1d5e7aeab1f6748f90bc80040e6fce37 100644 (file)
@@ -13,9 +13,9 @@ class EggInfoFromGit(egg_info):
     from source package), leave it alone.
     """
     def git_latest_tag(self):
-        gitinfo = subprocess.check_output(
-            ['git', 'describe', '--abbrev=0']).strip()
-        return str(gitinfo.decode('utf-8'))
+        gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+        gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+        return str(next(iter(gittags)).decode('utf-8'))
 
     def git_timestamp_tag(self):
         gitinfo = subprocess.check_output(
index 609af6e23dda07b2467f6cc78dfe3f69ae00bb65..da919309f4e829f227f3241eb7d41759087dde08 100644 (file)
@@ -7,7 +7,7 @@ if not File.exist?('/usr/bin/git') then
   exit
 end
 
-git_latest_tag = `git describe --abbrev=0`
+git_latest_tag = `git tag -l |sort -V -r |head -n1`
 git_latest_tag = git_latest_tag.encode('utf-8').strip
 git_timestamp, git_hash = `git log -n1 --first-parent --format=%ct:%H .`.chomp.split(":")
 git_timestamp = Time.at(git_timestamp.to_i).utc
index cc15a56f35325f56ea5762c050aa4494f5e5a5d4..93d5b9a0239753a8820d86b883abcbdf1a06b776 100644 (file)
@@ -274,9 +274,8 @@ class ArvadosModel < ActiveRecord::Base
       if !include_trash
         if sql_table != "api_client_authorizations"
           # Only include records where the owner is not trashed
-          sql_conds = "NOT EXISTS(SELECT 1 FROM #{PERMISSION_VIEW} "+
-                      "WHERE trashed = 1 AND "+
-                      "(#{sql_table}.owner_uuid = target_uuid)) #{exclude_trashed_records}"
+          sql_conds = "#{sql_table}.owner_uuid NOT IN (SELECT target_uuid FROM #{PERMISSION_VIEW} "+
+                      "WHERE trashed = 1) #{exclude_trashed_records}"
         end
       end
     else
@@ -294,14 +293,14 @@ class ArvadosModel < ActiveRecord::Base
       # see issue 13208 for details.
 
       # Match a direct read permission link from the user to the record uuid
-      direct_check = "EXISTS(SELECT 1 FROM #{PERMISSION_VIEW} "+
-                     "WHERE user_uuid IN (:user_uuids) AND perm_level >= 1 #{trashed_check} AND target_uuid = #{sql_table}.uuid)"
+      direct_check = "#{sql_table}.uuid IN (SELECT target_uuid FROM #{PERMISSION_VIEW} "+
+                     "WHERE user_uuid IN (:user_uuids) AND perm_level >= 1 #{trashed_check})"
 
       # Match a read permission link from the user to the record's owner_uuid
       owner_check = ""
       if sql_table != "api_client_authorizations" and sql_table != "groups" then
-        owner_check = "OR EXISTS(SELECT 1 FROM #{PERMISSION_VIEW} "+
-          "WHERE user_uuid IN (:user_uuids) AND perm_level >= 1 #{trashed_check} AND target_uuid = #{sql_table}.owner_uuid AND target_owner_uuid IS NOT NULL) "
+        owner_check = "OR #{sql_table}.owner_uuid IN (SELECT target_uuid FROM #{PERMISSION_VIEW} "+
+          "WHERE user_uuid IN (:user_uuids) AND perm_level >= 1 #{trashed_check} AND target_owner_uuid IS NOT NULL) "
       end
 
       links_cond = ""
@@ -403,7 +402,7 @@ class ArvadosModel < ActiveRecord::Base
       cast = serialized_attributes[column] ? '::text' : ''
       "coalesce(#{column}#{cast},'')"
     end
-    "to_tsvector('english', #{parts.join(" || ' ' || ")})"
+    "to_tsvector('english', substr(#{parts.join(" || ' ' || ")}, 0, 8000))"
   end
 
   def self.apply_filters query, filters
diff --git a/services/api/db/migrate/20180917200000_replace_full_text_indexes.rb b/services/api/db/migrate/20180917200000_replace_full_text_indexes.rb
new file mode 100644 (file)
index 0000000..b0eea9e
--- /dev/null
@@ -0,0 +1,14 @@
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+require './db/migrate/20161213172944_full_text_search_indexes'
+
+class ReplaceFullTextIndexes < ActiveRecord::Migration
+  def up
+    FullTextSearchIndexes.new.up
+  end
+
+  def down
+  end
+end
index 5105914df0dbd04ab599790d934f03194021dccf..aa29a1cbb409d59542d0d037cbdf703f9c407ea5 100644 (file)
@@ -1631,7 +1631,7 @@ CREATE INDEX collection_index_on_properties ON public.collections USING gin (pro
 -- Name: collections_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
 --
 
-CREATE INDEX collections_full_text_search_idx ON public.collections USING gin (to_tsvector('english'::regconfig, (((((((((((((((((COALESCE(owner_uuid, ''::character varying))::text || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(portable_data_hash, ''::character varying))::text) || ' '::text) || (COALESCE(uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || COALESCE((properties)::text, ''::text)) || ' '::text) || COALESCE(file_names, (''::character varying)::text))));
+CREATE INDEX collections_full_text_search_idx ON public.collections USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((((COALESCE(owner_uuid, ''::character varying))::text || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(portable_data_hash, ''::character varying))::text) || ' '::text) || (COALESCE(uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || COALESCE((properties)::text, ''::text)) || ' '::text) || COALESCE(file_names, ''::text)), 0, 1000000)));
 
 
 --
@@ -1645,7 +1645,7 @@ CREATE INDEX collections_search_index ON public.collections USING btree (owner_u
 -- Name: container_requests_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
 --
 
-CREATE INDEX container_requests_full_text_search_idx ON public.container_requests USING gin (to_tsvector('english'::regconfig, (((((((((((((((((((((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(description, ''::text)) || ' '::text) || COALESCE((properties)::text, ''::text)) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || (COALESCE(requesting_container_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(container_uuid, ''::character varying))::text) || ' '::text) || COALESCE(runtime_constraints, ''::text)) || ' '::text) || (COALESCE(container_image, ''::character varying))::text) || ' '::text) || COALESCE(environment, ''::text)) || ' '::text) || (COALESCE(cwd, ''::character varying))::text) || ' '::text) || COALESCE(command, ''::text)) || ' '::text) || (COALESCE(output_path, ''::character varying))::text) || ' '::text) || COALESCE(filters, ''::text)) || ' '::text) || COALESCE(scheduling_parameters, ''::text)) || ' '::text) || (COALESCE(output_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(log_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(output_name, ''::character varying))::text)));
+CREATE INDEX container_requests_full_text_search_idx ON public.container_requests USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((((((((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(description, ''::text)) || ' '::text) || COALESCE((properties)::text, ''::text)) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || (COALESCE(requesting_container_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(container_uuid, ''::character varying))::text) || ' '::text) || COALESCE(runtime_constraints, ''::text)) || ' '::text) || (COALESCE(container_image, ''::character varying))::text) || ' '::text) || COALESCE(environment, ''::text)) || ' '::text) || (COALESCE(cwd, ''::character varying))::text) || ' '::text) || COALESCE(command, ''::text)) || ' '::text) || (COALESCE(output_path, ''::character varying))::text) || ' '::text) || COALESCE(filters, ''::text)) || ' '::text) || COALESCE(scheduling_parameters, ''::text)) || ' '::text) || (COALESCE(output_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(log_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(output_name, ''::character varying))::text), 0, 1000000)));
 
 
 --
@@ -1680,7 +1680,7 @@ CREATE INDEX group_index_on_properties ON public.groups USING gin (properties);
 -- Name: groups_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
 --
 
-CREATE INDEX groups_full_text_search_idx ON public.groups USING gin (to_tsvector('english'::regconfig, (((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || (COALESCE(group_class, ''::character varying))::text) || ' '::text) || COALESCE((properties)::text, ''::text))));
+CREATE INDEX groups_full_text_search_idx ON public.groups USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || (COALESCE(group_class, ''::character varying))::text) || ' '::text) || COALESCE((properties)::text, ''::text)), 0, 1000000)));
 
 
 --
@@ -2653,7 +2653,7 @@ CREATE INDEX job_tasks_search_index ON public.job_tasks USING btree (uuid, owner
 -- Name: jobs_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
 --
 
-CREATE INDEX jobs_full_text_search_idx ON public.jobs USING gin (to_tsvector('english'::regconfig, (((((((((((((((((((((((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(submit_id, ''::character varying))::text) || ' '::text) || (COALESCE(script, ''::character varying))::text) || ' '::text) || (COALESCE(script_version, ''::character varying))::text) || ' '::text) || COALESCE(script_parameters, ''::text)) || ' '::text) || (COALESCE(cancelled_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(cancelled_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(output, ''::character varying))::text) || ' '::text) || (COALESCE(is_locked_by_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(log, ''::character varying))::text) || ' '::text) || COALESCE(tasks_summary, ''::text)) || ' '::text) || COALESCE(runtime_constraints, ''::text)) || ' '::text) || (COALESCE(repository, ''::character varying))::text) || ' '::text) || (COALESCE(supplied_script_version, ''::character varying))::text) || ' '::text) || (COALESCE(docker_image_locator, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || (COALESCE(arvados_sdk_version, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text))));
+CREATE INDEX jobs_full_text_search_idx ON public.jobs USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((((((((((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(submit_id, ''::character varying))::text) || ' '::text) || (COALESCE(script, ''::character varying))::text) || ' '::text) || (COALESCE(script_version, ''::character varying))::text) || ' '::text) || COALESCE(script_parameters, ''::text)) || ' '::text) || (COALESCE(cancelled_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(cancelled_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(output, ''::character varying))::text) || ' '::text) || (COALESCE(is_locked_by_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(log, ''::character varying))::text) || ' '::text) || COALESCE(tasks_summary, ''::text)) || ' '::text) || COALESCE(runtime_constraints, ''::text)) || ' '::text) || (COALESCE(repository, ''::character varying))::text) || ' '::text) || (COALESCE(supplied_script_version, ''::character varying))::text) || ' '::text) || (COALESCE(docker_image_locator, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || (COALESCE(arvados_sdk_version, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)), 0, 1000000)));
 
 
 --
@@ -2744,7 +2744,7 @@ CREATE INDEX permission_target_user_trashed_level ON public.materialized_permiss
 -- Name: pipeline_instances_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
 --
 
-CREATE INDEX pipeline_instances_full_text_search_idx ON public.pipeline_instances USING gin (to_tsvector('english'::regconfig, (((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(pipeline_template_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)) || ' '::text) || COALESCE(properties, ''::text)) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || COALESCE(components_summary, ''::text)) || ' '::text) || (COALESCE(description, ''::character varying))::text)));
+CREATE INDEX pipeline_instances_full_text_search_idx ON public.pipeline_instances USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(pipeline_template_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)) || ' '::text) || COALESCE(properties, ''::text)) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || COALESCE(components_summary, ''::text)) || ' '::text) || (COALESCE(description, ''::character varying))::text), 0, 1000000)));
 
 
 --
@@ -2765,7 +2765,7 @@ CREATE UNIQUE INDEX pipeline_template_owner_uuid_name_unique ON public.pipeline_
 -- Name: pipeline_templates_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
 --
 
-CREATE INDEX pipeline_templates_full_text_search_idx ON public.pipeline_templates USING gin (to_tsvector('english'::regconfig, (((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)) || ' '::text) || (COALESCE(description, ''::character varying))::text)));
+CREATE INDEX pipeline_templates_full_text_search_idx ON public.pipeline_templates USING gin (to_tsvector('english'::regconfig, substr((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)) || ' '::text) || (COALESCE(description, ''::character varying))::text), 0, 1000000)));
 
 
 --
@@ -2821,7 +2821,7 @@ CREATE INDEX virtual_machines_search_index ON public.virtual_machines USING btre
 -- Name: workflows_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
 --
 
-CREATE INDEX workflows_full_text_search_idx ON public.workflows USING gin (to_tsvector('english'::regconfig, (((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(description, ''::text))));
+CREATE INDEX workflows_full_text_search_idx ON public.workflows USING gin (to_tsvector('english'::regconfig, substr((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(description, ''::text)), 0, 1000000)));
 
 
 --
@@ -3187,6 +3187,8 @@ INSERT INTO schema_migrations (version) VALUES ('20180913175443');
 
 INSERT INTO schema_migrations (version) VALUES ('20180915155335');
 
+INSERT INTO schema_migrations (version) VALUES ('20180917200000');
+
 INSERT INTO schema_migrations (version) VALUES ('20180917205609');
 
 INSERT INTO schema_migrations (version) VALUES ('20180919001158');
index a24d53dad6a629f9d08692bb19dd62e144655a7b..2e6484cabdf1e71d39f5fe21139b29c2ce09ad93 100644 (file)
@@ -8,9 +8,9 @@ import os
 import re
 
 def git_latest_tag():
-    gitinfo = subprocess.check_output(
-        ['git', 'describe', '--abbrev=0']).strip()
-    return str(gitinfo.decode('utf-8'))
+    gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+    gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+    return str(next(iter(gittags)).decode('utf-8'))
 
 def git_timestamp_tag():
     gitinfo = subprocess.check_output(
index a24d53dad6a629f9d08692bb19dd62e144655a7b..2e6484cabdf1e71d39f5fe21139b29c2ce09ad93 100644 (file)
@@ -8,9 +8,9 @@ import os
 import re
 
 def git_latest_tag():
-    gitinfo = subprocess.check_output(
-        ['git', 'describe', '--abbrev=0']).strip()
-    return str(gitinfo.decode('utf-8'))
+    gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+    gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+    return str(next(iter(gittags)).decode('utf-8'))
 
 def git_timestamp_tag():
     gitinfo = subprocess.check_output(
index f2c5735985a7131129c38469e2183ffb70ef10f6..605e8540ee1df59d3b96618ebef50f4b39567384 100644 (file)
@@ -7,7 +7,7 @@ if not File.exists?('/usr/bin/git') then
   exit
 end
 
-git_latest_tag = `git describe --abbrev=0`
+git_latest_tag = `git tag -l |sort -V -r |head -n1`
 git_latest_tag = git_latest_tag.encode('utf-8').strip
 git_timestamp, git_hash = `git log -n1 --first-parent --format=%ct:%H .`.chomp.split(":")
 git_timestamp = Time.at(git_timestamp.to_i).utc
index a24d53dad6a629f9d08692bb19dd62e144655a7b..2e6484cabdf1e71d39f5fe21139b29c2ce09ad93 100644 (file)
@@ -8,9 +8,9 @@ import os
 import re
 
 def git_latest_tag():
-    gitinfo = subprocess.check_output(
-        ['git', 'describe', '--abbrev=0']).strip()
-    return str(gitinfo.decode('utf-8'))
+    gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+    gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+    return str(next(iter(gittags)).decode('utf-8'))
 
 def git_timestamp_tag():
     gitinfo = subprocess.check_output(
diff --git a/tools/arvbox/lib/arvbox/docker/58118E89F3A912897C070ADBF76221572C52609D.asc b/tools/arvbox/lib/arvbox/docker/58118E89F3A912897C070ADBF76221572C52609D.asc
new file mode 100644 (file)
index 0000000..086bab3
--- /dev/null
@@ -0,0 +1,106 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQINBFWln24BEADrBl5p99uKh8+rpvqJ48u4eTtjeXAWbslJotmC/CakbNSqOb9o
+ddfzRvGVeJVERt/Q/mlvEqgnyTQy+e6oEYN2Y2kqXceUhXagThnqCoxcEJ3+KM4R
+mYdoe/BJ/J/6rHOjq7Omk24z2qB3RU1uAv57iY5VGw5p45uZB4C4pNNsBJXoCvPn
+TGAs/7IrekFZDDgVraPx/hdiwopQ8NltSfZCyu/jPpWFK28TR8yfVlzYFwibj5WK
+dHM7ZTqlA1tHIG+agyPf3Rae0jPMsHR6q+arXVwMccyOi+ULU0z8mHUJ3iEMIrpT
+X+80KaN/ZjibfsBOCjcfiJSB/acn4nxQQgNZigna32velafhQivsNREFeJpzENiG
+HOoyC6qVeOgKrRiKxzymj0FIMLru/iFF5pSWcBQB7PYlt8J0G80lAcPr6VCiN+4c
+NKv03SdvA69dCOj79PuO9IIvQsJXsSq96HB+TeEmmL+xSdpGtGdCJHHM1fDeCqkZ
+hT+RtBGQL2SEdWjxbF43oQopocT8cHvyX6Zaltn0svoGs+wX3Z/H6/8P5anog43U
+65c0A+64Jj00rNDr8j31izhtQMRo892kGeQAaaxg4Pz6HnS7hRC+cOMHUU4HA7iM
+zHrouAdYeTZeZEQOA7SxtCME9ZnGwe2grxPXh/U/80WJGkzLFNcTKdv+rwARAQAB
+tDdEb2NrZXIgUmVsZWFzZSBUb29sIChyZWxlYXNlZG9ja2VyKSA8ZG9ja2VyQGRv
+Y2tlci5jb20+iQGcBBABCgAGBQJaJYMKAAoJENNu5NUL+WcWfQML/RjicnhN0G28
++Hj3icn/SHYXg8VTHMX7aAuuClZh7GoXlvVlyN0cfRHTcFPkhv1LJ5/zFVwJxlIc
+xX0DlWbv5zlPQQQfNYH7mGCt3OS0QJGDpCM9Q6iw1EqC0CdtBDIZMGn7s9pnuq5C
+3kzer097BltvuXWI+BRMvVad2dhzuOQi76jyxhprTUL6Xwm7ytNSja5Xyigfc8HF
+rXhlQxnMEpWpTttY+En1SaTgGg7/4yB9jG7UqtdaVuAvWI69V+qzJcvgW6do5XwH
+b/5waezxOU033stXcRCYkhEenm+mXzcJYXt2avg1BYIQsZuubCBlpPtZkgWWLOf+
+eQR1Qcy9IdWQsfpH8DX6cEbeiC0xMImcuufI5KDHZQk7E7q8SDbDbk5Dam+2tRef
+eTB2A+MybVQnpsgCvEBNQ2TfcWsZ6uLHMBhesx/+rmyOnpJDTvvCLlkOMTUNPISf
+GJI0IHZFHUJ/+/uRfgIzG6dSqxQ0zHXOwGg4GbhjpQ5I+5Eg2BNRkYkCHAQQAQoA
+BgUCVsO73QAKCRBcs2HlUvsNEB8rD/4t+5uEsqDglXJ8m5dfL88ARHKeFQkW17x7
+zl7ctYHHFSFfP2iajSoAVfe5WN766TsoiHgfBE0HoLK8RRO7fxs9K7Czm6nyxB3Z
+p+YgSUZIS3wqc43jp8gd2dCCQelKIDv5rEFWHuQlyZersK9AJqIggS61ZQwJLcVY
+fUVnIdJdCmUV9haR7vIfrjNP88kqiInZWHy2t8uaB7HFPpxlNYuiJsA0w98rGQuY
+6fWlX71JnBEsgG+L73XAB0fm14QP0VvEB3njBZYlsO2do2B8rh5g51htslK5wqgC
+U61lfjnykSM8yRQbOHvPK7uYdmSF3UXqcP/gjmI9+C8s8UdnMa9rv8b8cFwpEjHu
+xeCmQKYQ/tcLOtRYZ1DIvzxETGH0xbrz6wpKuIMgY7d3xaWdjUf3ylvO0DnlXJ9Y
+r15fYndzDLPSlybIO0GrE+5grHntlSBbMa5BUNozaQ/iQBEUZ/RY+AKxy+U28JJB
+W2Wb0oun6+YdhmwgFyBoSFyp446Kz2P2A1+l/AGhzltc25Vsvwha+lRZfet464yY
+GoNBurTbQWS63JWYFoTkKXmWeS2789mQOQqka3wFXMDzVtXzmxSEbaler7lZbhTj
+wjAAJzp6kdNsPbde4lUIzt6FTdJm0Ivb47hMV4dWKEnFXrYjui0ppUH1RFUU6hyz
+IF8kfxDKO4kCHAQQAQoABgUCV0lgZQAKCRBcs2HlUvsNEHh9EACOm7QH2MGD7gI3
+0VMvapZz4Wfsbda58LFM7G5qPCt10zYfpf0dPJ7tHbHM8N9ENcI7tvH4dTfGsttt
+/uvX9PsiAml6kdfAGxoBRil+76NIHxFWsXSLVDd3hzcnRhc5njimwJa8SDBAp0kx
+v05BVWDvTbZb/b0jdgbqZk2oE0RK8S2Sp1bFkc6fl3pcJYFOQQmelOmXvPmyHOhd
+W2bLX9e1/IulzVf6zgi8dsj9IZ9aLKJY6Cz6VvJ85ML6mLGGwgNvJTLdWqntFFr0
+QqkdM8ZSp9ezWUKo28XGoxDAmo6ENNTLIZjuRlnj1Yr9mmwmf4mgucyqlU93XjCR
+y6u5bpuqoQONRPYCR/UKKk/qoGnYXnhX6AtUD+3JHvrV5mINkd/ad5eR5pviUGz+
+H/VeZqVhMbxxgkm3Gra9+bZ2pCCWboKtqIM7JtXYwks/dttkV5fTqBarJtWzcwO/
+Pv3DreTdnMoVNGzNk/84IeNmGww/iQ1Px0psVCKVPsKxr2RjNhVP7qdA0cTguFNX
+y+hx5Y/JYjSVnxIN74aLoDoeuoBhfYpOY+HiJTaM+pbLfoJr5WUPf/YUQ3qBvgG4
+WXiJUOAgsPmNY//n1MSMyhz1SvmhSXfqCVTb26IyVv0oA3UjLRcKjr18mHB5d9Fr
+NIGVHg8gJjRmXid5BZJZwKQ5niivjokCIgQQAQoADAUCV3uc0wWDB4YfgAAKCRAx
+uBWjAQZ0qe2DEACaq16AaJ2QKtOweqlGk92gQoJ2OCbIW15hW/1660u+X+2CQz8d
+nySXaq22AyBx4Do88b6d54D6TqScyObGJpGroHqAjvyh7v/t/V6oEwe34Ls2qUX2
+77lqfqsz3B0nW/aKZ2oH8ygM3tw0J5y4sAj5bMrxqcwuCs14Fds3v+K2mjsntZCu
+ztHB8mqZp/6v00d0vGGqcl6uVaS04cCQMNUkQ7tGMXlyAEIiH2ksU+/RJLaIqFtg
+klfP3Y7foAY15ymCSQPD9c81+xjbf0XNmBtDreL+rQVtesahU4Pp+Sc23iuXGdY2
+yF13wnGmScojNjM2BoUiffhFeyWBdOTgCFhOEhk0Y1zKrkNqDC0sDAj0B5vhQg/T
+10NLR2MerSk9+MJLHZqFrHXo5f59zUvte/JhtViP5TdO/Yd4ptoEcDspDKLv0FrN
+7xsP8Q6DmBz1doCe06PQS1Z1Sv4UToHRS2RXskUnDc8Cpuex5mDBQO+LV+tNToh4
+ZNcpj9lFHNuaA1qS15X3EVCySZaPyn2WRd6ZisCKtwopRmshVItTTcLmrxu+hHAF
+bVRVFRRSCE8JIZLkWwRyMrcxB2KLBYA+f2nCtD2rqiZ8K8Cr9J1qt2iu5yogCwA/
+ombzzYxWWrt/wD6ixJr5kZwBJZroHB7FkRBcTDIzDFYGBYmClACTvLuOnokCIgQS
+AQoADAUCWKy8/gWDB4YfgAAKCRAkW0txwCm5FmrGD/9lL31LQtn5wxwoZvfEKuMh
+KRw0FDUq59lQpqyMxp7lrZozFUqlH4MLTeEWbFle+R+UbUoVkBnZ/cSvVGwtRVaH
+wUeP9NAqBLtIqt4S0T2T0MW6Ug0DVH7V7uYuFktpv1xmIzcC4gV+LHhp95SPYbWr
+uVMi6ENIMZoEqW9uHOy6n2/nh76dR2NVJiZHt5LbG8YXM/Y+z3XsIenwKQ97YO7x
+yEaM7UdsQSqKVB0isTQXT2wxoA/pDvSyu7jpElD5dOtPPz3r0fQpcQKrq0IMjgcB
+u5X5tQ5uktmmdaAvIwLibUB9A+htFiFP4irSx//Lkn66RLjrSqwtMCsv7wbPvTfc
+fdpcmkR767t1VvjQWj9DBfOMjGJk9eiLkUSHYyQst6ELyVdutAIHRV2GQqfEKJzc
+cD3wKdbaOoABqRVr/ok5Oj0YKSrvk0lW3l8vS/TZXvQppSMdJuaTR8JDy6dGuoKt
+uyFDb0fKf1JU3+Gj3Yy2YEfqX0MjNQsck9pDV647UXXdzF9uh3cYVfPbl+xBYOU9
+d9qRcqMut50AVIxpUepGa4Iw7yOSRPCnPAMNAPSmAdJTaQcRWcUd9LOaZH+ZFLJZ
+mpbvS//jQpoBt++Ir8wl9ZJXICRJcvrQuhCjOSNLFzsNr/wyVLnGwmTjLWoJEA0p
+c0cYtLW6fSGknkvNA7e8LYkCMwQQAQgAHRYhBFI9KC2HD6c70cN9svEo88fgKodF
+BQJZ76NPAAoJEPEo88fgKodFYXwP+wW6F7UpNmKXaddu+aamLTe3uv8OSKUHQbRh
+By1oxfINI7iC+BZl9ycJip0S08JH0F+RZsi1H24+GcP9vGTDgu3z0NcOOD4mPpzM
+jSi2/hbGzh9C84pxRJVLAKrbqCz7YQ6JdNG4RUHW/r0QgKTnTlvikVx7n9QaPrVl
+PsVFU3xv5oQxUHpwNWyvpPGTDiycuaGKekodYhZ0vKzJzfyyaUTgfxvTVVj10jyi
+f+mSfY8YBHhDesgYF1d2CUEPth9z5KC/eDgY7KoWs8ZK6sVL3+tGrnqK/s6jqcsk
+J7Kt4c3k0jU56rUo8+jnu9yUHcBXAjtr1Vz/nwVfqmPzukIF1ZkMqdQqIRtvDyEC
+16yGngMpWEVM3/vIsi2/uUMuGvjEkEmqs2oLK1hf+Y0W6Avq+9fZUQUEk0e4wbpu
+RCqX5OjeQTEEXmAzoMsdAiwFvr1ul+eI/BPy+29OQ77hz3/dotdYYfs1JVkiFUhf
+PJwvpoUOXiA5V56wl3i5tkbRSLRSkLmiLTlCEfClHEK/wwLU4ZKuD5UpW8xL438l
+/Ycnsl7aumnofWoaEREBc1Xbnx9SZbrTT8VctW8XpMVIPxCwJCp/LqHtyEbnptnD
+7QoHtdWexFmQFUIlGaDiaL7nv0BD6RA/HwhVSxU3b3deKDYNpG9QnAzte8KXA9/s
+ejP18gCKiQI4BBMBAgAiBQJVpZ9uAhsvBgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIX
+gAAKCRD3YiFXLFJgnbRfEAC9Uai7Rv20QIDlDogRzd+Vebg4ahyoUdj0CH+nAk40
+RIoq6G26u1e+sdgjpCa8jF6vrx+smpgd1HeJdmpahUX0XN3X9f9qU9oj9A4I1WDa
+lRWJh+tP5WNv2ySy6AwcP9QnjuBMRTnTK27pk1sEMg9oJHK5p+ts8hlSC4SluyMK
+H5NMVy9c+A9yqq9NF6M6d6/ehKfBFFLG9BX+XLBATvf1ZemGVHQusCQebTGv0C0V
+9yqtdPdRWVIEhHxyNHATaVYOafTj/EF0lDxLl6zDT6trRV5n9F1VCEh4Aal8L5Mx
+VPcIZVO7NHT2EkQgn8CvWjV3oKl2GopZF8V4XdJRl90U/WDv/6cmfI08GkzDYBHh
+S8ULWRFwGKobsSTyIvnbk4NtKdnTGyTJCQ8+6i52s+C54PiNgfj2ieNn6oOR7d+b
+NCcG1CdOYY+ZXVOcsjl73UYvtJrO0Rl/NpYERkZ5d/tzw4jZ6FCXgggA/Zxcjk6Y
+1ZvIm8Mt8wLRFH9Nww+FVsCtaCXJLP8DlJLASMD9rl5QS9Ku3u7ZNrr5HWXPHXIT
+X660jglyshch6CWeiUATqjIAzkEQom/kEnOrvJAtkypRJ59vYQOedZ1sFVELMXg2
+UCkD/FwojfnVtjzYaTCeGwFQeqzHmM241iuOmBYPeyTY5veF49aBJA1gEJOQTvBR
+8YkCOQQRAQgAIxYhBDlHZ/sRadXUayJzU3Es9wyw8WURBQJaajQrBYMHhh+AAAoJ
+EHEs9wyw8WURDyEP/iD903EcaiZP68IqUBsdHMxOaxnKZD9H2RTBaTjR6r9UjCOf
+bomXpVzL0dMZw1nHIE7u2VT++5wk+QvcN7epBgOWUb6tNcv3nI3vqMGRR+fKW15V
+J1sUwMOKGC4vlbLRVRWd2bb+oPZWeteOxNIqu/8DHDFHg3LtoYxWbrMYHhvd0ben
+B9GvwoqeBaqAeERKYCEoPZRB5O6ZHccX2HacjwFs4uYvIoRg4WI+ODXVHXCgOVZq
+yRuVAuQUjwkLbKL1vxJ01EWzWwRI6cY9mngFXNTHEkoxNyjzlfpn/YWheRiwpwg+
+ymDL4oj1KHNq06zNl38dZCd0rde3OFNuF904H6D+reYL50YA9lkL9mRtlaiYyo1J
+SOOjdr+qxuelfbLgDSeM75YVSiYiZZO8DWr2Cq/SNp47z4T4Il/yhQ6eAstZOIkF
+KQlBjr+ZtLdUu67sPdgPoT842IwSrRTrirEUd6cyADbRggPHrOoYEooBCrCgDYCM
+K1xxG9f6Q42yvL1zWKollibsvJF8MVwgkWfJJyhLYylmJ8osvX9LNdCJZErVrRTz
+wAM00crp/KIiIDCREEgE+5BiuGdM70gSuy3JXSs78JHA4l2tu1mDBrMxNR+C8lpj
+1pnLFHTfGYwHQSwKm42/JZqbePh6LKblUdS5Np1dl0tk5DDHBluRzhx16H7E
+=lwu7
+-----END PGP PUBLIC KEY BLOCK-----
index f54bf832adaedb7887dbeab91c778c864c305d9a..4f915946f9e402e680c4add0331d4e1c18130b33 100644 (file)
@@ -51,8 +51,9 @@ VOLUME /var/lib/docker
 VOLUME /var/log/nginx
 VOLUME /etc/ssl/private
 
-RUN apt-key adv --no-tty --keyserver hkp://pool.sks-keyservers.net:80 --recv-keys 58118E89F3A912897C070ADBF76221572C52609D || \
-    apt-key adv --no-tty --keyserver hkp://pgp.mit.edu:80 --recv-keys 58118E89F3A912897C070ADBF76221572C52609D
+ADD 58118E89F3A912897C070ADBF76221572C52609D.asc /tmp/
+RUN apt-key add --no-tty /tmp/58118E89F3A912897C070ADBF76221572C52609D.asc && \
+    rm -f /tmp/58118E89F3A912897C070ADBF76221572C52609D.asc
 
 RUN mkdir -p /etc/apt/sources.list.d && \
     echo deb https://apt.dockerproject.org/repo debian-stretch main > /etc/apt/sources.list.d/docker.list && \
index a24d53dad6a629f9d08692bb19dd62e144655a7b..2e6484cabdf1e71d39f5fe21139b29c2ce09ad93 100644 (file)
@@ -8,9 +8,9 @@ import os
 import re
 
 def git_latest_tag():
-    gitinfo = subprocess.check_output(
-        ['git', 'describe', '--abbrev=0']).strip()
-    return str(gitinfo.decode('utf-8'))
+    gittags = subprocess.check_output(['git', 'tag', '-l']).split()
+    gittags.sort(key=lambda s: [int(u) for u in s.split(b'.')],reverse=True)
+    return str(next(iter(gittags)).decode('utf-8'))
 
 def git_timestamp_tag():
     gitinfo = subprocess.check_output(