15133: More test updates. Remove arv-run.
author Peter Amstutz <pamstutz@veritasgenetics.com>
Thu, 8 Aug 2019 19:46:42 +0000 (15:46 -0400)
committer Peter Amstutz <pamstutz@veritasgenetics.com>
Thu, 8 Aug 2019 19:46:42 +0000 (15:46 -0400)
Arvados-DCO-1.1-Signed-off-by: Peter Amstutz <pamstutz@veritasgenetics.com>

apps/workbench/test/unit/disabled_api_test.rb
lib/config/generated_config.go
sdk/go/arvadosclient/arvadosclient_test.go
sdk/python/arvados/commands/run.py
sdk/python/bin/arv-run [deleted file]
sdk/python/setup.py
sdk/python/tests/test_arv_run.py [deleted file]
sdk/python/tests/test_pipeline_template.py [deleted file]
sdk/python/tests/test_retry_job_helpers.py
services/api/test/unit/job_test.rb

index 9e18a7063fc3e164fd6752d07c34703fb64aa415..54e7c08197109ba1cf26f9d73ccb15caa3fcb194 100644 (file)
@@ -5,13 +5,7 @@
 require 'test_helper'
 
 class DisabledApiTest < ActiveSupport::TestCase
-  test 'Job.creatable? reflects whether jobs.create API is enabled' do
-    use_token(:active) do
-      assert(Job.creatable?)
-    end
-    dd = ArvadosApiClient.new_or_current.discovery.deep_dup
-    dd[:resources][:jobs][:methods].delete(:create)
-    ArvadosApiClient.any_instance.stubs(:discovery).returns(dd)
+  test 'Job.creatable? is false' do
     use_token(:active) do
       refute(Job.creatable?)
     end
index 602f30e1dae5480bb22ed39e3b0a9bf8c1e04e8f..971b810f5aab6371a4c6a76ef8bc8e72c009f389 100644 (file)
@@ -576,7 +576,11 @@ Clusters:
           AssignNodeHostname: "compute%<slot_number>d"
 
       JobsAPI:
-        # Enable the legacy Jobs API.  This value must be a string.
+        # Enable the legacy 'jobs' API (crunch v1).  This value must be a string.
+        #
+        # Note: this only enables read-only access; creating new
+        # legacy jobs and pipelines is not supported.
+        #
         # 'auto' -- (default) enable the Jobs API only if it has been used before
         #         (i.e., there are job records in the database)
         # 'true' -- enable the Jobs API despite lack of existing records.
@@ -589,30 +593,6 @@ Clusters:
         # {git_repositories_dir}/arvados/.git
         GitInternalDir: /var/lib/arvados/internal.git
 
-        # Docker image to be used when none found in runtime_constraints of a job
-        DefaultDockerImage: ""
-
-        # none or slurm_immediate
-        CrunchJobWrapper: none
-
-        # username, or false = do not set uid when running jobs.
-        CrunchJobUser: crunch
-
-        # The web service must be able to create/write this file, and
-        # crunch-job must be able to stat() it.
-        CrunchRefreshTrigger: /tmp/crunch_refresh_trigger
-
-        # Control job reuse behavior when two completed jobs match the
-        # search criteria and have different outputs.
-        #
-        # If true, in case of a conflict, reuse the earliest job (this is
-        # similar to container reuse behavior).
-        #
-        # If false, in case of a conflict, do not reuse any completed job,
-        # but do reuse an already-running job if available (this is the
-        # original job reuse behavior, and is still the default).
-        ReuseJobIfOutputsDiffer: false
-
       CloudVMs:
         # Enable the cloud scheduler (experimental).
         Enable: false
index 372f09d14bb14f929a40020523c75da69617cdbc..ecd9e2c6e0263d1bf1c7f320f47f3e073f7f2220 100644 (file)
@@ -10,7 +10,6 @@ import (
        "net/http"
        "os"
        "testing"
-       "time"
 
        "git.curoverse.com/arvados.git/sdk/go/arvadostest"
        . "gopkg.in/check.v1"
@@ -114,55 +113,6 @@ func (s *ServerRequiredSuite) TestInvalidResourceType(c *C) {
        c.Assert(len(getback), Equals, 0)
 }
 
-func (s *ServerRequiredSuite) TestCreatePipelineTemplate(c *C) {
-       arv, err := MakeArvadosClient()
-
-       for _, idleConnections := range []bool{
-               false,
-               true,
-       } {
-               if idleConnections {
-                       arv.lastClosedIdlesAt = time.Now().Add(-time.Minute)
-               } else {
-                       arv.lastClosedIdlesAt = time.Now()
-               }
-
-               getback := make(Dict)
-               err = arv.Create("pipeline_templates",
-                       Dict{"pipeline_template": Dict{
-                               "name": "tmp",
-                               "components": Dict{
-                                       "c1": map[string]string{"script": "script1"},
-                                       "c2": map[string]string{"script": "script2"}}}},
-                       &getback)
-               c.Assert(err, Equals, nil)
-               c.Assert(getback["name"], Equals, "tmp")
-               c.Assert(getback["components"].(map[string]interface{})["c2"].(map[string]interface{})["script"], Equals, "script2")
-
-               uuid := getback["uuid"].(string)
-
-               getback = make(Dict)
-               err = arv.Get("pipeline_templates", uuid, nil, &getback)
-               c.Assert(err, Equals, nil)
-               c.Assert(getback["name"], Equals, "tmp")
-               c.Assert(getback["components"].(map[string]interface{})["c1"].(map[string]interface{})["script"], Equals, "script1")
-
-               getback = make(Dict)
-               err = arv.Update("pipeline_templates", uuid,
-                       Dict{
-                               "pipeline_template": Dict{"name": "tmp2"}},
-                       &getback)
-               c.Assert(err, Equals, nil)
-               c.Assert(getback["name"], Equals, "tmp2")
-
-               c.Assert(getback["uuid"].(string), Equals, uuid)
-               getback = make(Dict)
-               err = arv.Delete("pipeline_templates", uuid, nil, &getback)
-               c.Assert(err, Equals, nil)
-               c.Assert(getback["name"], Equals, "tmp2")
-       }
-}
-
 func (s *ServerRequiredSuite) TestErrorResponse(c *C) {
        arv, _ := MakeArvadosClient()
 
index b17ed291807ab88de5948cfcdfaf6562bea5d009..f0d518d4e9c7a8b917a363afdb749e6d0022a7db 100644 (file)
@@ -39,34 +39,6 @@ import arvados.config as config
 
 from arvados._version import __version__
 
-logger = logging.getLogger('arvados.arv-run')
-logger.setLevel(logging.INFO)
-
-arvrun_parser = argparse.ArgumentParser(parents=[arv_cmd.retry_opt])
-arvrun_parser.add_argument('--dry-run', action="store_true",
-                           help="Print out the pipeline that would be submitted and exit")
-arvrun_parser.add_argument('--local', action="store_true",
-                           help="Run locally using arv-run-pipeline-instance")
-arvrun_parser.add_argument('--docker-image', type=str,
-                           help="Docker image to use, otherwise use instance default.")
-arvrun_parser.add_argument('--ignore-rcode', action="store_true",
-                           help="Commands that return non-zero return codes should not be considered failed.")
-arvrun_parser.add_argument('--no-reuse', action="store_true",
-                           help="Do not reuse past jobs.")
-arvrun_parser.add_argument('--no-wait', action="store_true",
-                           help="Do not wait and display logs after submitting command, just exit.")
-arvrun_parser.add_argument('--project-uuid', type=str,
-                           help="Parent project of the pipeline")
-arvrun_parser.add_argument('--git-dir', type=str, default="",
-                           help="Git repository passed to arv-crunch-job when using --local")
-arvrun_parser.add_argument('--repository', type=str, default="arvados",
-                           help="repository field of component, default 'arvados'")
-arvrun_parser.add_argument('--script-version', type=str, default="master",
-                           help="script_version field of component, default 'master'")
-arvrun_parser.add_argument('--version', action='version',
-                           version="%s %s" % (sys.argv[0], __version__),
-                           help='Print version and exit.')
-arvrun_parser.add_argument('args', nargs=argparse.REMAINDER)
 
 class ArvFile(object):
     def __init__(self, prefix, fn):
@@ -270,164 +242,7 @@ def uploadfiles(files, api, dry_run=False, num_retries=0,
 
 
 def main(arguments=None):
-    args = arvrun_parser.parse_args(arguments)
-
-    if len(args.args) == 0:
-        arvrun_parser.print_help()
-        return
-
-    starting_args = args.args
-
-    reading_into = 2
-
-    # Parse the command arguments into 'slots'.
-    # All words following '>' are output arguments and are collected into slots[0].
-    # All words following '<' are input arguments and are collected into slots[1].
-    # slots[2..] store the parameters of each command in the pipeline.
-    #
-    # e.g. arv-run foo arg1 arg2 '|' bar arg3 arg4 '<' input1 input2 input3 '>' output.txt
-    # will be parsed into:
-    #   [['output.txt'],
-    #    ['input1', 'input2', 'input3'],
-    #    ['foo', 'arg1', 'arg2'],
-    #    ['bar', 'arg3', 'arg4']]
-    slots = [[], [], []]
-    for c in args.args:
-        if c.startswith('>'):
-            reading_into = 0
-            if len(c) > 1:
-                slots[reading_into].append(c[1:])
-        elif c.startswith('<'):
-            reading_into = 1
-            if len(c) > 1:
-                slots[reading_into].append(c[1:])
-        elif c == '|':
-            reading_into = len(slots)
-            slots.append([])
-        else:
-            slots[reading_into].append(c)
-
-    if slots[0] and len(slots[0]) > 1:
-        logger.error("Can only specify a single stdout file (run-command substitutions are permitted)")
-        return
-
-    if not args.dry_run:
-        api = arvados.api('v1')
-        if args.project_uuid:
-            project = args.project_uuid
-        else:
-            project = determine_project(os.getcwd(), api.users().current().execute()["uuid"])
-
-    # Identify input files.  Look at each parameter and test to see if there is
-    # a file by that name.  This uses 'patterns' to look for within
-    # command line arguments, such as --foo=file.txt or -lfile.txt
-    patterns = [re.compile("([^=]+=)(.*)"),
-                re.compile("(-[A-Za-z])(.+)")]
-    for j, command in enumerate(slots[1:]):
-        for i, a in enumerate(command):
-            if j > 0 and i == 0:
-                # j == 0 is stdin, j > 0 is commands
-                # always skip program executable (i == 0) in commands
-                pass
-            elif a.startswith('\\'):
-                # if it starts with a \ then don't do any interpretation
-                command[i] = a[1:]
-            else:
-                # See if it looks like a file
-                command[i] = statfile('', a)
-
-                # If a file named command[i] was found, it would now be an
-                # ArvFile or UploadFile.  If command[i] is a basestring, that
-                # means it doesn't correspond exactly to a file, so do some
-                # pattern matching.
-                if isinstance(command[i], basestring):
-                    for p in patterns:
-                        m = p.match(a)
-                        if m:
-                            command[i] = statfile(m.group(1), m.group(2))
-                            break
-
-    files = [c for command in slots[1:] for c in command if isinstance(c, UploadFile)]
-    if files:
-        uploadfiles(files, api, dry_run=args.dry_run, num_retries=args.retries, project=project)
-
-    for i in range(1, len(slots)):
-        slots[i] = [("%s%s" % (c.prefix, c.fn)) if isinstance(c, ArvFile) else c for c in slots[i]]
-
-    component = {
-        "script": "run-command",
-        "script_version": args.script_version,
-        "repository": args.repository,
-        "script_parameters": {
-        },
-        "runtime_constraints": {}
-    }
-
-    if args.docker_image:
-        component["runtime_constraints"]["docker_image"] = args.docker_image
-
-    task_foreach = []
-    group_parser = argparse.ArgumentParser()
-    group_parser.add_argument('-b', '--batch-size', type=int)
-    group_parser.add_argument('args', nargs=argparse.REMAINDER)
-
-    for s in range(2, len(slots)):
-        for i in range(0, len(slots[s])):
-            if slots[s][i] == '--':
-                inp = "input%i" % (s-2)
-                groupargs = group_parser.parse_args(slots[2][i+1:])
-                if groupargs.batch_size:
-                    component["script_parameters"][inp] = {"value": {"batch":groupargs.args, "size":groupargs.batch_size}}
-                    slots[s] = slots[s][0:i] + [{"foreach": inp, "command": "$(%s)" % inp}]
-                else:
-                    component["script_parameters"][inp] = groupargs.args
-                    slots[s] = slots[s][0:i] + ["$(%s)" % inp]
-                task_foreach.append(inp)
-                break
-            if slots[s][i] == '\--':
-                slots[s][i] = '--'
-
-    if slots[0]:
-        component["script_parameters"]["task.stdout"] = slots[0][0]
-    if slots[1]:
-        task_foreach.append("stdin")
-        component["script_parameters"]["stdin"] = slots[1]
-        component["script_parameters"]["task.stdin"] = "$(stdin)"
-
-    if task_foreach:
-        component["script_parameters"]["task.foreach"] = task_foreach
-
-    component["script_parameters"]["command"] = slots[2:]
-    if args.ignore_rcode:
-        component["script_parameters"]["task.ignore_rcode"] = args.ignore_rcode
-
-    pipeline = {
-        "name": "arv-run " + " | ".join([s[0] for s in slots[2:]]),
-        "description": "@" + " ".join(starting_args) + "@",
-        "components": {
-            "command": component
-        },
-        "state": "RunningOnClient" if args.local else "RunningOnServer"
-    }
-
-    if args.dry_run:
-        print(json.dumps(pipeline, indent=4))
-    else:
-        pipeline["owner_uuid"] = project
-        pi = api.pipeline_instances().create(body=pipeline, ensure_unique_name=True).execute()
-        logger.info("Running pipeline %s", pi["uuid"])
-
-        if args.local:
-            subprocess.call(["arv-run-pipeline-instance", "--instance", pi["uuid"], "--run-jobs-here"] + (["--no-reuse"] if args.no_reuse else []))
-        elif not args.no_wait:
-            ws.main(["--pipeline", pi["uuid"]])
-
-        pi = api.pipeline_instances().get(uuid=pi["uuid"]).execute()
-        logger.info("Pipeline is %s", pi["state"])
-        if "output_uuid" in pi["components"]["command"]:
-            logger.info("Output is %s", pi["components"]["command"]["output_uuid"])
-        else:
-            logger.info("No output")
+    raise Exception("Legacy arv-run removed.")
 
 if __name__ == '__main__':
     main()
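
With this change, the legacy arv-run argument parsing and pipeline-instance submission code is gone and the module-level main() raises immediately; the CWL-based tooling (arvados-cwl-runner) is not touched by this commit. A minimal sketch of what a caller of the removed entry point now sees (assumes the arvados Python SDK built from this commit is installed; the snippet itself is illustrative and not part of the commit):

    # hypothetical check, not part of this commit
    import arvados.commands.run as arv_run

    try:
        arv_run.main()          # former arv-run CLI entry point
    except Exception as exc:
        # After this commit, main() unconditionally raises
        # Exception("Legacy arv-run removed.")
        print(exc)
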
diff --git a/sdk/python/bin/arv-run b/sdk/python/bin/arv-run
deleted file mode 100755 (executable)
index ebba201..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env python
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: Apache-2.0
-
-from arvados.commands.run import main
-main()
index 0fc2dde31b8a7851ff3db066062f068ebdbc9ff7..5130d187d3871a4db09a4c82f6c204137c23fd97 100644 (file)
@@ -40,7 +40,6 @@ setup(name='arvados-python-client',
           'bin/arv-federation-migrate',
           'bin/arv-normalize',
           'bin/arv-put',
-          'bin/arv-run',
           'bin/arv-ws'
       ],
       data_files=[
diff --git a/sdk/python/tests/test_arv_run.py b/sdk/python/tests/test_arv_run.py
deleted file mode 100644 (file)
index 1afc120..0000000
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: Apache-2.0
-
-from __future__ import absolute_import
-import os
-import sys
-import tempfile
-import unittest
-
-import arvados.commands.run as arv_run
-from . import arvados_testutil as tutil
-
-class ArvRunTestCase(unittest.TestCase, tutil.VersionChecker):
-    def run_arv_run(self, args):
-        sys.argv = ['arv-run'] + args
-        return arv_run.main()
-
-    def test_unsupported_arg(self):
-        with self.assertRaises(SystemExit):
-            self.run_arv_run(['-x=unknown'])
-
-    def test_version_argument(self):
-        with tutil.redirected_streams(
-                stdout=tutil.StringIO, stderr=tutil.StringIO) as (out, err):
-            with self.assertRaises(SystemExit):
-                self.run_arv_run(['--version'])
-        self.assertVersionOutput(out, err)
diff --git a/sdk/python/tests/test_pipeline_template.py b/sdk/python/tests/test_pipeline_template.py
deleted file mode 100644 (file)
index 88138f3..0000000
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: Apache-2.0
-
-from __future__ import absolute_import
-# usage example:
-#
-# ARVADOS_API_TOKEN=abc ARVADOS_API_HOST=arvados.local python -m unittest discover
-
-import unittest
-import arvados
-import apiclient
-from . import run_test_server
-
-class PipelineTemplateTest(run_test_server.TestCaseWithServers):
-    MAIN_SERVER = {}
-    KEEP_SERVER = {}
-
-    def runTest(self):
-        run_test_server.authorize_with("admin")
-        pt_uuid = arvados.api('v1').pipeline_templates().create(
-            body={'name':__file__}
-            ).execute()['uuid']
-        self.assertEqual(len(pt_uuid), 27,
-                         'Unexpected format of pipeline template UUID ("%s")'
-                         % pt_uuid)
-        components = {
-            'x': 'x',
-            '-x-': [1,2,{'foo':'bar'}],
-            'Boggis': {'Bunce': '[\'Bean\']'},
-            'SpassBox': True,
-            'spass_box': False,
-            'spass-box': [True, 'Maybe', False]
-            }
-        update_response = arvados.api('v1').pipeline_templates().update(
-            uuid=pt_uuid,
-            body={'components':components}
-            ).execute()
-        self.assertEqual('uuid' in update_response, True,
-                         'update() response did not include a uuid')
-        self.assertEqual(update_response['uuid'], pt_uuid,
-                         'update() response has a different uuid (%s, not %s)'
-                         % (update_response['uuid'], pt_uuid))
-        self.assertEqual(update_response['name'], __file__,
-                         'update() response has a different name (%s, not %s)'
-                         % (update_response['name'], __file__))
-        get_response = arvados.api('v1').pipeline_templates().get(
-            uuid=pt_uuid
-            ).execute()
-        self.assertEqual(get_response['components'], components,
-                         'components got munged by server (%s -> %s)'
-                         % (components, update_response['components']))
-        delete_response = arvados.api('v1').pipeline_templates().delete(
-            uuid=pt_uuid
-            ).execute()
-        self.assertEqual(delete_response['uuid'], pt_uuid,
-                         'delete() response has wrong uuid (%s, not %s)'
-                         % (delete_response['uuid'], pt_uuid))
-        with self.assertRaises(apiclient.errors.HttpError):
-            geterror_response = arvados.api('v1').pipeline_templates().get(
-                uuid=pt_uuid
-                ).execute()
index b9c87a68f2d9f17e7893f6689e106a0132e65c5f..76c62cb0ce9a5c5424db155bfec1c2ace5ac6df8 100644 (file)
@@ -76,34 +76,3 @@ class CurrentJobTestCase(ApiClientRetryTestMixin, unittest.TestCase):
 
     def run_method(self):
         arvados.current_job()
-
-
-class CurrentTaskTestCase(ApiClientRetryTestMixin, unittest.TestCase):
-
-    DEFAULT_EXCEPTION = arvados.errors.ApiError
-
-    def setUp(self):
-        super(CurrentTaskTestCase, self).setUp()
-        os.environ['TASK_UUID'] = 'zzzzz-zzzzz-zzzzzzzzzzzzzzz'
-        os.environ['TASK_WORK'] = '.'
-
-    def tearDown(self):
-        del os.environ['TASK_UUID']
-        del os.environ['TASK_WORK']
-        arvados._current_task = None
-        super(CurrentTaskTestCase, self).tearDown()
-
-    def run_method(self):
-        arvados.current_task()
-
-
-class TaskSetOutputTestCase(CurrentTaskTestCase, unittest.TestCase):
-
-    DEFAULT_EXCEPTION = arvados.errors.ApiError
-
-    def tearDown(self):
-        super(TaskSetOutputTestCase, self).tearDown()
-        run_test_server.reset()
-
-    def run_method(self, locator=ApiClientRetryTestMixin.TEST_LOCATOR):
-        arvados.task_set_output({'uuid':self.TEST_UUID},s=locator)
index c89c64c8cb2e06f7255d21217bb6e235c27125ff..5ece9ab499a342a2e95466241c9b885ade8a8c71 100644 (file)
@@ -639,7 +639,7 @@ class JobTest < ActiveSupport::TestCase
   test 'enable legacy api configuration option = true' do
     Rails.configuration.Containers.JobsAPI.Enable = "true"
     check_enable_legacy_jobs_api
-    assert_equal({}, Rails.configuration.API.DisabledAPIs)
+    assert_equal(Disable_update_jobs_api_method_list, Rails.configuration.API.DisabledAPIs)
   end
 
   test 'enable legacy api configuration option = false' do
@@ -652,7 +652,7 @@ class JobTest < ActiveSupport::TestCase
     Rails.configuration.Containers.JobsAPI.Enable = "auto"
     assert Job.count > 0
     check_enable_legacy_jobs_api
-    assert_equal({}, Rails.configuration.API.DisabledAPIs)
+    assert_equal(Disable_update_jobs_api_method_list, Rails.configuration.API.DisabledAPIs)
   end
 
   test 'enable legacy api configuration option = auto, no jobs' do