Merge branch 'master' into 16950-add-costanalyzer
author Ward Vandewege <ward@curii.com>
Wed, 18 Nov 2020 21:23:51 +0000 (16:23 -0500)
committer Ward Vandewege <ward@curii.com>
Wed, 18 Nov 2020 21:23:51 +0000 (16:23 -0500)
refs #16950

Arvados-DCO-1.1-Signed-off-by: Ward Vandewege <ward@curii.com>

cmd/arvados-client/cmd.go
lib/costanalyzer/cmd.go [new file with mode: 0644]
lib/costanalyzer/costanalyzer.go [new file with mode: 0644]
lib/costanalyzer/costanalyzer_test.go [new file with mode: 0644]
sdk/go/arvadostest/fixtures.go
services/api/test/fixtures/collections.yml
services/api/test/fixtures/container_requests.yml
services/api/test/fixtures/containers.yml

diff --git a/cmd/arvados-client/cmd.go b/cmd/arvados-client/cmd.go
index bcc3dda09ac91559d4a35227ef81c95bf3e979cd..47fcd5ad7dc88275c5c9ce47369f3432ac861632 100644 (file)
@@ -9,6 +9,7 @@ import (
 
        "git.arvados.org/arvados.git/lib/cli"
        "git.arvados.org/arvados.git/lib/cmd"
+       "git.arvados.org/arvados.git/lib/costanalyzer"
        "git.arvados.org/arvados.git/lib/deduplicationreport"
        "git.arvados.org/arvados.git/lib/mount"
 )
@@ -55,6 +56,7 @@ var (
 
                "mount":                mount.Command,
                "deduplication-report": deduplicationreport.Command,
+               "costanalyzer":         costanalyzer.Command,
        })
 )
 
diff --git a/lib/costanalyzer/cmd.go b/lib/costanalyzer/cmd.go
new file mode 100644 (file)
index 0000000..9b06852
--- /dev/null
@@ -0,0 +1,43 @@
+// Copyright (C) The Arvados Authors. All rights reserved.
+//
+// SPDX-License-Identifier: Apache-2.0
+
+package costanalyzer
+
+import (
+       "io"
+
+       "git.arvados.org/arvados.git/lib/config"
+       "git.arvados.org/arvados.git/sdk/go/ctxlog"
+       "github.com/sirupsen/logrus"
+)
+
+var Command command
+
+type command struct{}
+
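+// NoPrefixFormatter is a logrus formatter that emits the bare log message,
+// without the usual timestamp and level prefix.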
+type NoPrefixFormatter struct{}
+
+func (f *NoPrefixFormatter) Format(entry *logrus.Entry) ([]byte, error) {
+       return []byte(entry.Message), nil
+}
+
+// RunCommand implements the subcommand "costanalyzer <collection> <collection> ..."
+func (command) RunCommand(prog string, args []string, stdin io.Reader, stdout, stderr io.Writer) int {
+       var err error
+       logger := ctxlog.New(stderr, "text", "info")
+       defer func() {
+               if err != nil {
+                       logger.Error("\n" + err.Error() + "\n")
+               }
+       }()
+
+       logger.SetFormatter(new(NoPrefixFormatter))
+
+       loader := config.NewLoader(stdin, logger)
+       loader.SkipLegacy = true
+
+       exitcode, err := costanalyzer(prog, args, loader, logger, stdout, stderr)
+
+       return exitcode
+}
diff --git a/lib/costanalyzer/costanalyzer.go b/lib/costanalyzer/costanalyzer.go
new file mode 100644 (file)
index 0000000..4284542
--- /dev/null
@@ -0,0 +1,528 @@
+// Copyright (C) The Arvados Authors. All rights reserved.
+//
+// SPDX-License-Identifier: AGPL-3.0
+
+package costanalyzer
+
+import (
+       "encoding/json"
+       "errors"
+       "flag"
+       "fmt"
+       "git.arvados.org/arvados.git/lib/config"
+       "git.arvados.org/arvados.git/sdk/go/arvados"
+       "git.arvados.org/arvados.git/sdk/go/arvadosclient"
+       "git.arvados.org/arvados.git/sdk/go/keepclient"
+       "io"
+       "io/ioutil"
+       "net/http"
+       "os"
+       "strconv"
+       "strings"
+       "time"
+
+       "github.com/sirupsen/logrus"
+)
+
+type nodeInfo struct {
+       // Legacy (records created by Arvados Node Manager with Arvados <= 1.4.3)
+       Properties struct {
+               CloudNode struct {
+                       Price float64
+                       Size  string
+               } `json:"cloud_node"`
+       }
+       // Modern
+       ProviderType string
+       Price        float64
+}
+
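+// arrayFlags implements flag.Value so that the -uuid option can be given more
+// than once on the command line.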
+type arrayFlags []string
+
+func (i *arrayFlags) String() string {
+       return ""
+}
+
+func (i *arrayFlags) Set(value string) error {
+       *i = append(*i, value)
+       return nil
+}
+
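+// parseFlags parses the command line arguments, checks that at least one uuid
+// and an output directory were supplied, and applies the requested log level.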
+func parseFlags(prog string, args []string, loader *config.Loader, logger *logrus.Logger, stderr io.Writer) (exitCode int, uuids arrayFlags, resultsDir string, cache bool, err error) {
+       flags := flag.NewFlagSet("", flag.ContinueOnError)
+       flags.SetOutput(stderr)
+       flags.Usage = func() {
+               fmt.Fprintf(flags.Output(), `
+Usage:
+  %s [options ...]
+
+       This program analyzes the cost of Arvados container requests. For each uuid
+       supplied, it creates a CSV report that lists all the containers used to
+       fulfill the container request, together with the machine type and cost of
+       each container.
+
+       When supplied with the uuid of a container request, it will calculate the
+       cost of that container request and all its children. When supplied with a
+       project uuid or when supplied with multiple container request uuids, it will
+       create a CSV report for each supplied uuid, as well as a CSV file with
+       aggregate cost accounting for all supplied uuids. The aggregate cost report
+       takes container reuse into account: if a container was reused between several
+       container requests, its cost will only be counted once.
+
+       To get the node costs, the program reads the 'node.json' file from each
+       container's log collection. That file records the node type and hourly price
+       as defined in the Arvados API configuration at the time the container was
+       run.
+
+       Caveats:
+       - the Arvados API configuration cost data may be out of sync with the cloud
+       provider.
+       - when generating reports for older container requests, the cost data in the
+       Arvados API configuration file may have changed since the container request
+       was fulfilled. This program uses the cost data stored at the time of the
+       execution of the container, stored in the 'node.json' file in its log
+       collection.
+
+       In order to get the data for the uuids supplied, the ARVADOS_API_HOST and
+       ARVADOS_API_TOKEN environment variables must be set.
+
+Options:
+`, prog)
+               flags.PrintDefaults()
+       }
+       loglevel := flags.String("log-level", "info", "logging `level` (debug, info, ...)")
+       flags.StringVar(&resultsDir, "output", "", "output `directory` for the CSV reports (required)")
+       flags.Var(&uuids, "uuid", "Toplevel `project or container request` uuid. May be specified more than once. (required)")
+       flags.BoolVar(&cache, "cache", true, "create and use a local disk cache of Arvados objects")
+       err = flags.Parse(args)
+       if err == flag.ErrHelp {
+               err = nil
+               exitCode = 1
+               return
+       } else if err != nil {
+               exitCode = 2
+               return
+       }
+
+       if len(uuids) < 1 {
+               flags.Usage()
+               err = fmt.Errorf("Error: no uuid(s) provided")
+               exitCode = 2
+               return
+       }
+
+       if resultsDir == "" {
+               flags.Usage()
+               err = fmt.Errorf("Error: output directory must be specified")
+               exitCode = 2
+               return
+       }
+
+       lvl, err := logrus.ParseLevel(*loglevel)
+       if err != nil {
+               exitCode = 2
+               return
+       }
+       logger.SetLevel(lvl)
+       if !cache {
+               logger.Debug("Caching disabled\n")
+       }
+       return
+}
+
+func ensureDirectory(logger *logrus.Logger, dir string) (err error) {
+       statData, err := os.Stat(dir)
+       if os.IsNotExist(err) {
+               err = os.MkdirAll(dir, 0700)
+               if err != nil {
+                       return fmt.Errorf("error creating directory %s: %s", dir, err.Error())
+               }
+       } else {
+               if !statData.IsDir() {
+                       return fmt.Errorf("the path %s is not a directory", dir)
+               }
+       }
+       return
+}
+
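+// addContainerLine returns one CSV line describing the container request and
+// its container, plus the cost of running that container: its runtime in hours
+// multiplied by the hourly node price (taken from the legacy cloud_node
+// properties when present, otherwise from the modern node fields). As a purely
+// hypothetical example, a container that ran for 1800 seconds on a node priced
+// at 0.292/hour would cost 1800/3600*0.292 = 0.146.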
+func addContainerLine(logger *logrus.Logger, node nodeInfo, cr arvados.ContainerRequest, container arvados.Container) (csv string, cost float64) {
+       csv = cr.UUID + ","
+       csv += cr.Name + ","
+       csv += container.UUID + ","
+       csv += string(container.State) + ","
+       if container.StartedAt != nil {
+               csv += container.StartedAt.String() + ","
+       } else {
+               csv += ","
+       }
+
+       var delta time.Duration
+       if container.FinishedAt != nil {
+               csv += container.FinishedAt.String() + ","
+               delta = container.FinishedAt.Sub(*container.StartedAt)
+               csv += strconv.FormatFloat(delta.Seconds(), 'f', 0, 64) + ","
+       } else {
+               csv += ",,"
+       }
+       var price float64
+       var size string
+       if node.Properties.CloudNode.Price != 0 {
+               price = node.Properties.CloudNode.Price
+               size = node.Properties.CloudNode.Size
+       } else {
+               price = node.Price
+               size = node.ProviderType
+       }
+       cost = delta.Seconds() / 3600 * price
+       csv += size + "," + strconv.FormatFloat(price, 'f', 8, 64) + "," + strconv.FormatFloat(cost, 'f', 8, 64) + "\n"
+       return
+}
+
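+// loadCachedObject tries to read the object from the local disk cache. It
+// returns reload=true when the caller should (re)fetch the object from the API
+// server, i.e. when no usable cached copy exists or the cached object has not
+// reached a final state. Projects are never cached because they have no final
+// state.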
+func loadCachedObject(logger *logrus.Logger, file string, uuid string, object interface{}) (reload bool) {
+       reload = true
+       if strings.Contains(uuid, "-j7d0g-") {
+               // We do not cache projects, they have no final state
+               return
+       }
+       // See if we have a cached copy of this object
+       _, err := os.Stat(file)
+       if err != nil {
+               return
+       }
+       data, err := ioutil.ReadFile(file)
+       if err != nil {
+               logger.Errorf("error reading %q: %s", file, err)
+               return
+       }
+       err = json.Unmarshal(data, &object)
+       if err != nil {
+               logger.Errorf("failed to unmarshal json: %s: %s", data, err)
+               return
+       }
+
+       // See if it is in a final state, if that makes sense
+       switch v := object.(type) {
+       case *arvados.ContainerRequest:
+               if v.State == arvados.ContainerRequestStateFinal {
+                       reload = false
+                       logger.Debugf("Loaded object %s from local cache (%s)\n", uuid, file)
+               }
+       case *arvados.Container:
+               if v.State == arvados.ContainerStateComplete || v.State == arvados.ContainerStateCancelled {
+                       reload = false
+                       logger.Debugf("Loaded object %s from local cache (%s)\n", uuid, file)
+               }
+       }
+       return
+}
+
+// loadObject loads an Arvados object (project, container request, or
+// container) by UUID, optionally using a local disk cache under
+// ~/.cache/arvados/costanalyzer/ to avoid refetching objects that have
+// already reached a final state.
+func loadObject(logger *logrus.Logger, ac *arvados.Client, path string, uuid string, cache bool, object interface{}) (err error) {
+       file := uuid + ".json"
+
+       var reload bool
+       var cacheDir string
+
+       if !cache {
+               reload = true
+       } else {
+               homeDir, err := os.UserHomeDir()
+               if err != nil {
+                       reload = true
+                       logger.Info("Unable to determine current user home directory, not using cache")
+               } else {
+                       cacheDir = homeDir + "/.cache/arvados/costanalyzer/"
+                       err = ensureDirectory(logger, cacheDir)
+                       if err != nil {
+                               reload = true
+                               logger.Infof("Unable to create cache directory at %s, not using cache: %s", cacheDir, err.Error())
+                       } else {
+                               reload = loadCachedObject(logger, cacheDir+file, uuid, object)
+                       }
+               }
+       }
+       if !reload {
+               return
+       }
+
+       if strings.Contains(uuid, "-j7d0g-") {
+               err = ac.RequestAndDecode(&object, "GET", "arvados/v1/groups/"+uuid, nil, nil)
+       } else if strings.Contains(uuid, "-xvhdp-") {
+               err = ac.RequestAndDecode(&object, "GET", "arvados/v1/container_requests/"+uuid, nil, nil)
+       } else if strings.Contains(uuid, "-dz642-") {
+               err = ac.RequestAndDecode(&object, "GET", "arvados/v1/containers/"+uuid, nil, nil)
+       } else {
+               err = fmt.Errorf("unsupported object type with UUID %q", uuid)
+               return
+       }
+       if err != nil {
+               err = fmt.Errorf("error loading object with UUID %q:\n  %s", uuid, err)
+               return
+       }
+       encoded, err := json.MarshalIndent(object, "", " ")
+       if err != nil {
+               err = fmt.Errorf("error marshaling object with UUID %q:\n  %s", uuid, err)
+               return
+       }
+       if cacheDir != "" {
+               err = ioutil.WriteFile(cacheDir+file, encoded, 0644)
+               if err != nil {
+                       err = fmt.Errorf("error writing file %s:\n  %s", file, err)
+                       return
+               }
+       }
+       return
+}
+
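+// getNode returns the node type and price information for a container
+// request, read from the 'node.json' file stored in its log collection.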
+func getNode(arv *arvadosclient.ArvadosClient, ac *arvados.Client, kc *keepclient.KeepClient, cr arvados.ContainerRequest) (node nodeInfo, err error) {
+       if cr.LogUUID == "" {
+               err = errors.New("No log collection")
+               return
+       }
+
+       var collection arvados.Collection
+       err = ac.RequestAndDecode(&collection, "GET", "arvados/v1/collections/"+cr.LogUUID, nil, nil)
+       if err != nil {
+               err = fmt.Errorf("error getting collection: %s", err)
+               return
+       }
+
+       var fs arvados.CollectionFileSystem
+       fs, err = collection.FileSystem(ac, kc)
+       if err != nil {
+               err = fmt.Errorf("error opening collection as filesystem: %s", err)
+               return
+       }
+       var f http.File
+       f, err = fs.Open("node.json")
+       if err != nil {
+               err = fmt.Errorf("error opening file 'node.json' in collection %s: %s", cr.LogUUID, err)
+               return
+       }
+
+       err = json.NewDecoder(f).Decode(&node)
+       if err != nil {
+               err = fmt.Errorf("error reading file 'node.json' in collection %s: %s", cr.LogUUID, err)
+               return
+       }
+       return
+}
+
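+// handleProject generates a CSV report for each top level container request
+// owned by the given project, and returns the per-container costs so the
+// caller can aggregate them.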
+func handleProject(logger *logrus.Logger, uuid string, arv *arvadosclient.ArvadosClient, ac *arvados.Client, kc *keepclient.KeepClient, resultsDir string, cache bool) (cost map[string]float64, err error) {
+
+       cost = make(map[string]float64)
+
+       var project arvados.Group
+       err = loadObject(logger, ac, uuid, uuid, cache, &project)
+       if err != nil {
+               return nil, fmt.Errorf("error loading object %s: %s", uuid, err.Error())
+       }
+
+       var childCrs map[string]interface{}
+       filterset := []arvados.Filter{
+               {
+                       Attr:     "owner_uuid",
+                       Operator: "=",
+                       Operand:  project.UUID,
+               },
+               {
+                       Attr:     "requesting_container_uuid",
+                       Operator: "=",
+                       Operand:  nil,
+               },
+       }
+       err = ac.RequestAndDecode(&childCrs, "GET", "arvados/v1/container_requests", nil, map[string]interface{}{
+               "filters": filterset,
+               "limit":   10000,
+       })
+       if err != nil {
+               return nil, fmt.Errorf("error querying container_requests: %s", err.Error())
+       }
+       if value, ok := childCrs["items"]; ok {
+               logger.Infof("Collecting top level container requests in project %s\n", uuid)
+               items := value.([]interface{})
+               for _, item := range items {
+                       itemMap := item.(map[string]interface{})
+                       crCsv, err := generateCrCsv(logger, itemMap["uuid"].(string), arv, ac, kc, resultsDir, cache)
+                       if err != nil {
+                               return nil, fmt.Errorf("error generating container_request CSV: %s", err.Error())
+                       }
+                       for k, v := range crCsv {
+                               cost[k] = v
+                       }
+               }
+       } else {
+               logger.Infof("No top level container requests found in project %s\n", uuid)
+       }
+       return
+}
+
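+// generateCrCsv writes a CSV report for the given container request and all
+// of its child container requests to resultsDir, and returns a map of
+// container UUID to cost. Keying the map by container UUID means a container
+// that was reused by several container requests is only counted once in the
+// aggregate report.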
+func generateCrCsv(logger *logrus.Logger, uuid string, arv *arvadosclient.ArvadosClient, ac *arvados.Client, kc *keepclient.KeepClient, resultsDir string, cache bool) (cost map[string]float64, err error) {
+
+       cost = make(map[string]float64)
+
+       csv := "CR UUID,CR name,Container UUID,State,Started At,Finished At,Duration in seconds,Compute node type,Hourly node cost,Total cost\n"
+       var tmpCsv string
+       var tmpTotalCost float64
+       var totalCost float64
+
+       // This is a container request, find the container
+       var cr arvados.ContainerRequest
+       err = loadObject(logger, ac, uuid, uuid, cache, &cr)
+       if err != nil {
+               return nil, fmt.Errorf("error loading cr object %s: %s", uuid, err)
+       }
+       var container arvados.Container
+       err = loadObject(logger, ac, uuid, cr.ContainerUUID, cache, &container)
+       if err != nil {
+               return nil, fmt.Errorf("error loading container object %s: %s", cr.ContainerUUID, err)
+       }
+
+       topNode, err := getNode(arv, ac, kc, cr)
+       if err != nil {
+               return nil, fmt.Errorf("error getting node %s: %s", cr.UUID, err)
+       }
+       tmpCsv, tmpTotalCost = addContainerLine(logger, topNode, cr, container)
+       csv += tmpCsv
+       totalCost += tmpTotalCost
+       cost[container.UUID] = tmpTotalCost
+
+       // Find all container requests that have the container we found above as requesting_container_uuid
+       var childCrs arvados.ContainerRequestList
+       filterset := []arvados.Filter{
+               {
+                       Attr:     "requesting_container_uuid",
+                       Operator: "=",
+                       Operand:  container.UUID,
+               }}
+       err = ac.RequestAndDecode(&childCrs, "GET", "arvados/v1/container_requests", nil, map[string]interface{}{
+               "filters": filterset,
+               "limit":   10000,
+       })
+       if err != nil {
+               return nil, fmt.Errorf("error querying container_requests: %s", err.Error())
+       }
+       logger.Infof("Collecting child containers for container request %s", uuid)
+       for _, cr2 := range childCrs.Items {
+               logger.Info(".")
+               node, err := getNode(arv, ac, kc, cr2)
+               if err != nil {
+                       return nil, fmt.Errorf("error getting node %s: %s", cr2.UUID, err)
+               }
+               logger.Debug("\nChild container: " + cr2.ContainerUUID + "\n")
+               var c2 arvados.Container
+               err = loadObject(logger, ac, uuid, cr2.ContainerUUID, cache, &c2)
+               if err != nil {
+                       return nil, fmt.Errorf("error loading object %s: %s", cr2.ContainerUUID, err)
+               }
+               tmpCsv, tmpTotalCost = addContainerLine(logger, node, cr2, c2)
+               cost[cr2.ContainerUUID] = tmpTotalCost
+               csv += tmpCsv
+               totalCost += tmpTotalCost
+       }
+       logger.Info(" done\n")
+
+       csv += "TOTAL,,,,,,,,," + strconv.FormatFloat(totalCost, 'f', 8, 64) + "\n"
+
+       // Write the resulting CSV file
+       fName := resultsDir + "/" + uuid + ".csv"
+       err = ioutil.WriteFile(fName, []byte(csv), 0644)
+       if err != nil {
+               return nil, fmt.Errorf("error writing file with path %s: %s", fName, err.Error())
+       }
+       logger.Infof("\nUUID report in %s\n\n", fName)
+
+       return
+}
+
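+// costanalyzer writes a CSV report for each supplied container request (or
+// each top level container request in a supplied project), plus an aggregate
+// cost accounting CSV, to the requested output directory.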
+func costanalyzer(prog string, args []string, loader *config.Loader, logger *logrus.Logger, stdout, stderr io.Writer) (exitcode int, err error) {
+       exitcode, uuids, resultsDir, cache, err := parseFlags(prog, args, loader, logger, stderr)
+       if exitcode != 0 {
+               return
+       }
+       err = ensureDirectory(logger, resultsDir)
+       if err != nil {
+               exitcode = 3
+               return
+       }
+
+       // Arvados Client setup
+       arv, err := arvadosclient.MakeArvadosClient()
+       if err != nil {
+               err = fmt.Errorf("error creating Arvados object: %s", err)
+               exitcode = 1
+               return
+       }
+       kc, err := keepclient.MakeKeepClient(arv)
+       if err != nil {
+               err = fmt.Errorf("error creating Keep object: %s", err)
+               exitcode = 1
+               return
+       }
+
+       ac := arvados.NewClientFromEnv()
+
+       cost := make(map[string]float64)
+       for _, uuid := range uuids {
+               if strings.Contains(uuid, "-j7d0g-") {
+                       // This is a project (group)
+                       var projectCost map[string]float64
+                       projectCost, err = handleProject(logger, uuid, arv, ac, kc, resultsDir, cache)
+                       if err != nil {
+                               exitcode = 1
+                               return
+                       }
+                       for k, v := range projectCost {
+                               cost[k] = v
+                       }
+               } else if strings.Contains(uuid, "-xvhdp-") {
+                       // This is a container request
+                       var crCsv map[string]float64
+                       crCsv, err = generateCrCsv(logger, uuid, arv, ac, kc, resultsDir, cache)
+                       if err != nil {
+                               err = fmt.Errorf("error generating container_request CSV for uuid %s: %s", uuid, err.Error())
+                               exitcode = 2
+                               return
+                       }
+                       for k, v := range crCsv {
+                               cost[k] = v
+                       }
+               } else if strings.Contains(uuid, "-tpzed-") {
+                       // This is a user. The "Home" project for a user is not a real project.
+                       // It is identified by the user uuid. As such, cost analysis for the
+                       // "Home" project is not supported by this program. Skip this uuid, but
+                       // keep going.
+                       logger.Errorf("Cost analysis is not supported for the 'Home' project: %s", uuid)
+               }
+       }
+
+       if len(cost) == 0 {
+               logger.Info("Nothing to do!\n")
+               return
+       }
+
+       var csv string
+
+       csv = "# Aggregate cost accounting for uuids:\n"
+       for _, uuid := range uuids {
+               csv += "# " + uuid + "\n"
+       }
+
+       var total float64
+       for k, v := range cost {
+               csv += k + "," + strconv.FormatFloat(v, 'f', 8, 64) + "\n"
+               total += v
+       }
+
+       csv += "TOTAL," + strconv.FormatFloat(total, 'f', 8, 64) + "\n"
+
+       // Write the resulting CSV file
+       aFile := resultsDir + "/" + time.Now().Format("2006-01-02-15-04-05") + "-aggregate-costaccounting.csv"
+       err = ioutil.WriteFile(aFile, []byte(csv), 0644)
+       if err != nil {
+               err = fmt.Errorf("error writing file with path %s: %s", aFile, err.Error())
+               exitcode = 1
+               return
+       }
+       logger.Infof("Aggregate cost accounting for all supplied uuids in %s\n", aFile)
+       return
+}
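
For reference, the new subcommand can be driven programmatically the same way the tests below exercise it. The following is only an illustrative sketch, not part of this commit; the uuid and output directory are placeholders:

	package main

	import (
		"os"

		"git.arvados.org/arvados.git/lib/costanalyzer"
	)

	func main() {
		// The uuid and output directory below are placeholders, for illustration only.
		exitcode := costanalyzer.Command.RunCommand("costanalyzer",
			[]string{"-uuid", "zzzzz-xvhdp-cr4completedctr", "-output", "results"},
			os.Stdin, os.Stdout, os.Stderr)
		os.Exit(exitcode)
	}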
diff --git a/lib/costanalyzer/costanalyzer_test.go b/lib/costanalyzer/costanalyzer_test.go
new file mode 100644 (file)
index 0000000..4fab93b
--- /dev/null
@@ -0,0 +1,266 @@
+// Copyright (C) The Arvados Authors. All rights reserved.
+//
+// SPDX-License-Identifier: AGPL-3.0
+
+package costanalyzer
+
+import (
+       "bytes"
+       "io"
+       "io/ioutil"
+       "os"
+       "regexp"
+       "testing"
+
+       "git.arvados.org/arvados.git/sdk/go/arvados"
+       "git.arvados.org/arvados.git/sdk/go/arvadosclient"
+       "git.arvados.org/arvados.git/sdk/go/arvadostest"
+       "git.arvados.org/arvados.git/sdk/go/keepclient"
+       "gopkg.in/check.v1"
+)
+
+func Test(t *testing.T) {
+       check.TestingT(t)
+}
+
+var _ = check.Suite(&Suite{})
+
+type Suite struct{}
+
+func (s *Suite) TearDownSuite(c *check.C) {
+       // Undo any changes/additions to the database so they don't affect subsequent tests.
+       arvadostest.ResetEnv()
+}
+
+func (s *Suite) SetUpSuite(c *check.C) {
+       arvadostest.StartAPI()
+       arvadostest.StartKeep(2, true)
+
+       // Get the various arvados, arvadosclient, and keep client objects
+       ac := arvados.NewClientFromEnv()
+       arv, err := arvadosclient.MakeArvadosClient()
+       c.Assert(err, check.Equals, nil)
+       arv.ApiToken = arvadostest.ActiveToken
+       kc, err := keepclient.MakeKeepClient(arv)
+       c.Assert(err, check.Equals, nil)
+
+       standardE4sV3JSON := `{
+    "Name": "Standard_E4s_v3",
+    "ProviderType": "Standard_E4s_v3",
+    "VCPUs": 4,
+    "RAM": 34359738368,
+    "Scratch": 64000000000,
+    "IncludedScratch": 64000000000,
+    "AddedScratch": 0,
+    "Price": 0.292,
+    "Preemptible": false
+}`
+       standardD32sV3JSON := `{
+    "Name": "Standard_D32s_v3",
+    "ProviderType": "Standard_D32s_v3",
+    "VCPUs": 32,
+    "RAM": 137438953472,
+    "Scratch": 256000000000,
+    "IncludedScratch": 256000000000,
+    "AddedScratch": 0,
+    "Price": 1.76,
+    "Preemptible": false
+}`
+
+       standardA1V2JSON := `{
+    "Name": "a1v2",
+    "ProviderType": "Standard_A1_v2",
+    "VCPUs": 1,
+    "RAM": 2147483648,
+    "Scratch": 10000000000,
+    "IncludedScratch": 10000000000,
+    "AddedScratch": 0,
+    "Price": 0.043,
+    "Preemptible": false
+}`
+
+       standardA2V2JSON := `{
+    "Name": "a2v2",
+    "ProviderType": "Standard_A2_v2",
+    "VCPUs": 2,
+    "RAM": 4294967296,
+    "Scratch": 20000000000,
+    "IncludedScratch": 20000000000,
+    "AddedScratch": 0,
+    "Price": 0.091,
+    "Preemptible": false
+}`
+
+       legacyD1V2JSON := `{
+    "properties": {
+        "cloud_node": {
+            "price": 0.073001,
+            "size": "Standard_D1_v2"
+        },
+        "total_cpu_cores": 1,
+        "total_ram_mb": 3418,
+        "total_scratch_mb": 51170
+    }
+}`
+
+       // Our fixtures do not actually contain file contents. Populate the log collections we're going to use with the node.json file
+       createNodeJSON(c, arv, ac, kc, arvadostest.CompletedContainerRequestUUID, arvadostest.LogCollectionUUID, standardE4sV3JSON)
+       createNodeJSON(c, arv, ac, kc, arvadostest.CompletedContainerRequestUUID2, arvadostest.LogCollectionUUID2, standardD32sV3JSON)
+
+       createNodeJSON(c, arv, ac, kc, arvadostest.CompletedDiagnosticsContainerRequest1UUID, arvadostest.DiagnosticsContainerRequest1LogCollectionUUID, standardA1V2JSON)
+       createNodeJSON(c, arv, ac, kc, arvadostest.CompletedDiagnosticsContainerRequest2UUID, arvadostest.DiagnosticsContainerRequest2LogCollectionUUID, standardA1V2JSON)
+       createNodeJSON(c, arv, ac, kc, arvadostest.CompletedDiagnosticsHasher1ContainerRequestUUID, arvadostest.Hasher1LogCollectionUUID, standardA1V2JSON)
+       createNodeJSON(c, arv, ac, kc, arvadostest.CompletedDiagnosticsHasher2ContainerRequestUUID, arvadostest.Hasher2LogCollectionUUID, standardA2V2JSON)
+       createNodeJSON(c, arv, ac, kc, arvadostest.CompletedDiagnosticsHasher3ContainerRequestUUID, arvadostest.Hasher3LogCollectionUUID, legacyD1V2JSON)
+}
+
+func createNodeJSON(c *check.C, arv *arvadosclient.ArvadosClient, ac *arvados.Client, kc *keepclient.KeepClient, crUUID string, logUUID string, nodeJSON string) {
+       // Get the CR
+       var cr arvados.ContainerRequest
+       err := ac.RequestAndDecode(&cr, "GET", "arvados/v1/container_requests/"+crUUID, nil, nil)
+       c.Assert(err, check.Equals, nil)
+       c.Assert(cr.LogUUID, check.Equals, logUUID)
+
+       // Get the log collection
+       var coll arvados.Collection
+       err = ac.RequestAndDecode(&coll, "GET", "arvados/v1/collections/"+cr.LogUUID, nil, nil)
+       c.Assert(err, check.IsNil)
+
+       // Create a node.json file -- the fixture doesn't actually contain the contents of the collection.
+       fs, err := coll.FileSystem(ac, kc)
+       c.Assert(err, check.IsNil)
+       f, err := fs.OpenFile("node.json", os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
+       c.Assert(err, check.IsNil)
+       _, err = io.WriteString(f, nodeJSON)
+       c.Assert(err, check.IsNil)
+       err = f.Close()
+       c.Assert(err, check.IsNil)
+
+       // Flush the data to Keep
+       mtxt, err := fs.MarshalManifest(".")
+       c.Assert(err, check.IsNil)
+       c.Assert(mtxt, check.NotNil)
+
+       // Update collection record
+       err = ac.RequestAndDecode(&coll, "PUT", "arvados/v1/collections/"+cr.LogUUID, nil, map[string]interface{}{
+               "collection": map[string]interface{}{
+                       "manifest_text": mtxt,
+               },
+       })
+       c.Assert(err, check.IsNil)
+}
+
+func (*Suite) TestUsage(c *check.C) {
+       var stdout, stderr bytes.Buffer
+       exitcode := Command.RunCommand("costanalyzer.test", []string{"-help", "-log-level=debug"}, &bytes.Buffer{}, &stdout, &stderr)
+       c.Check(exitcode, check.Equals, 1)
+       c.Check(stdout.String(), check.Equals, "")
+       c.Check(stderr.String(), check.Matches, `(?ms).*Usage:.*`)
+}
+
+func (*Suite) TestContainerRequestUUID(c *check.C) {
+       var stdout, stderr bytes.Buffer
+       // Run costanalyzer with 1 container request uuid
+       exitcode := Command.RunCommand("costanalyzer.test", []string{"-uuid", arvadostest.CompletedContainerRequestUUID, "-output", "results"}, &bytes.Buffer{}, &stdout, &stderr)
+       c.Check(exitcode, check.Equals, 0)
+       c.Assert(stderr.String(), check.Matches, "(?ms).*supplied uuids in .*")
+
+       uuidReport, err := ioutil.ReadFile("results/" + arvadostest.CompletedContainerRequestUUID + ".csv")
+       c.Assert(err, check.IsNil)
+       c.Check(string(uuidReport), check.Matches, "(?ms).*TOTAL,,,,,,,,,7.01302889")
+       re := regexp.MustCompile(`(?ms).*supplied uuids in (.*?)\n`)
+       matches := re.FindStringSubmatch(stderr.String()) // matches[1] contains a string like 'results/2020-11-02-18-57-45-aggregate-costaccounting.csv'
+
+       aggregateCostReport, err := ioutil.ReadFile(matches[1])
+       c.Assert(err, check.IsNil)
+
+       c.Check(string(aggregateCostReport), check.Matches, "(?ms).*TOTAL,7.01302889")
+}
+
+func (*Suite) TestDoubleContainerRequestUUID(c *check.C) {
+       var stdout, stderr bytes.Buffer
+       // Run costanalyzer with 2 container request uuids
+       exitcode := Command.RunCommand("costanalyzer.test", []string{"-uuid", arvadostest.CompletedContainerRequestUUID, "-uuid", arvadostest.CompletedContainerRequestUUID2, "-output", "results"}, &bytes.Buffer{}, &stdout, &stderr)
+       c.Check(exitcode, check.Equals, 0)
+       c.Assert(stderr.String(), check.Matches, "(?ms).*supplied uuids in .*")
+
+       uuidReport, err := ioutil.ReadFile("results/" + arvadostest.CompletedContainerRequestUUID + ".csv")
+       c.Assert(err, check.IsNil)
+       c.Check(string(uuidReport), check.Matches, "(?ms).*TOTAL,,,,,,,,,7.01302889")
+
+       uuidReport2, err := ioutil.ReadFile("results/" + arvadostest.CompletedContainerRequestUUID2 + ".csv")
+       c.Assert(err, check.IsNil)
+       c.Check(string(uuidReport2), check.Matches, "(?ms).*TOTAL,,,,,,,,,42.27031111")
+
+       re := regexp.MustCompile(`(?ms).*supplied uuids in (.*?)\n`)
+       matches := re.FindStringSubmatch(stderr.String()) // matches[1] contains a string like 'results/2020-11-02-18-57-45-aggregate-costaccounting.csv'
+
+       aggregateCostReport, err := ioutil.ReadFile(matches[1])
+       c.Assert(err, check.IsNil)
+
+       c.Check(string(aggregateCostReport), check.Matches, "(?ms).*TOTAL,49.28334000")
+       stdout.Truncate(0)
+       stderr.Truncate(0)
+
+       // Now move both container requests into an existing project, and then re-run
+       // the analysis with the project uuid. The results should be identical.
+       ac := arvados.NewClientFromEnv()
+       var cr arvados.ContainerRequest
+       err = ac.RequestAndDecode(&cr, "PUT", "arvados/v1/container_requests/"+arvadostest.CompletedContainerRequestUUID, nil, map[string]interface{}{
+               "container_request": map[string]interface{}{
+                       "owner_uuid": arvadostest.AProjectUUID,
+               },
+       })
+       c.Assert(err, check.IsNil)
+       err = ac.RequestAndDecode(&cr, "PUT", "arvados/v1/container_requests/"+arvadostest.CompletedContainerRequestUUID2, nil, map[string]interface{}{
+               "container_request": map[string]interface{}{
+                       "owner_uuid": arvadostest.AProjectUUID,
+               },
+       })
+       c.Assert(err, check.IsNil)
+
+       // Run costanalyzer with the project uuid
+       exitcode = Command.RunCommand("costanalyzer.test", []string{"-uuid", arvadostest.AProjectUUID, "-cache=false", "-log-level", "debug", "-output", "results"}, &bytes.Buffer{}, &stdout, &stderr)
+       c.Check(exitcode, check.Equals, 0)
+       c.Assert(stderr.String(), check.Matches, "(?ms).*supplied uuids in .*")
+
+       uuidReport, err = ioutil.ReadFile("results/" + arvadostest.CompletedContainerRequestUUID + ".csv")
+       c.Assert(err, check.IsNil)
+       c.Check(string(uuidReport), check.Matches, "(?ms).*TOTAL,,,,,,,,,7.01302889")
+
+       uuidReport2, err = ioutil.ReadFile("results/" + arvadostest.CompletedContainerRequestUUID2 + ".csv")
+       c.Assert(err, check.IsNil)
+       c.Check(string(uuidReport2), check.Matches, "(?ms).*TOTAL,,,,,,,,,42.27031111")
+
+       re = regexp.MustCompile(`(?ms).*supplied uuids in (.*?)\n`)
+       matches = re.FindStringSubmatch(stderr.String()) // matches[1] contains a string like 'results/2020-11-02-18-57-45-aggregate-costaccounting.csv'
+
+       aggregateCostReport, err = ioutil.ReadFile(matches[1])
+       c.Assert(err, check.IsNil)
+
+       c.Check(string(aggregateCostReport), check.Matches, "(?ms).*TOTAL,49.28334000")
+}
+
+func (*Suite) TestMultipleContainerRequestUUIDWithReuse(c *check.C) {
+       var stdout, stderr bytes.Buffer
+       // Run costanalyzer with 2 container request uuids
+       exitcode := Command.RunCommand("costanalyzer.test", []string{"-uuid", arvadostest.CompletedDiagnosticsContainerRequest1UUID, "-uuid", arvadostest.CompletedDiagnosticsContainerRequest2UUID, "-output", "results"}, &bytes.Buffer{}, &stdout, &stderr)
+       c.Check(exitcode, check.Equals, 0)
+       c.Assert(stderr.String(), check.Matches, "(?ms).*supplied uuids in .*")
+
+       uuidReport, err := ioutil.ReadFile("results/" + arvadostest.CompletedDiagnosticsContainerRequest1UUID + ".csv")
+       c.Assert(err, check.IsNil)
+       c.Check(string(uuidReport), check.Matches, "(?ms).*TOTAL,,,,,,,,,0.00916192")
+
+       uuidReport2, err := ioutil.ReadFile("results/" + arvadostest.CompletedDiagnosticsContainerRequest2UUID + ".csv")
+       c.Assert(err, check.IsNil)
+       c.Check(string(uuidReport2), check.Matches, "(?ms).*TOTAL,,,,,,,,,0.00588088")
+
+       re := regexp.MustCompile(`(?ms).*supplied uuids in (.*?)\n`)
+       matches := re.FindStringSubmatch(stderr.String()) // matches[1] contains a string like 'results/2020-11-02-18-57-45-aggregate-costaccounting.csv'
+
+       aggregateCostReport, err := ioutil.ReadFile(matches[1])
+       c.Assert(err, check.IsNil)
+
+       c.Check(string(aggregateCostReport), check.Matches, "(?ms).*TOTAL,0.01492030")
+}
diff --git a/sdk/go/arvadostest/fixtures.go b/sdk/go/arvadostest/fixtures.go
index 5677f4deca5d70f16ab44d4023434f7e94fc73e2..aeb5a47e6d0559df094ee3cbec5432d3b3b8f2ce 100644 (file)
@@ -44,7 +44,27 @@ const (
 
        RunningContainerUUID = "zzzzz-dz642-runningcontainr"
 
-       CompletedContainerUUID = "zzzzz-dz642-compltcontainer"
+       CompletedContainerUUID         = "zzzzz-dz642-compltcontainer"
+       CompletedContainerRequestUUID  = "zzzzz-xvhdp-cr4completedctr"
+       CompletedContainerRequestUUID2 = "zzzzz-xvhdp-cr4completedcr2"
+
+       CompletedDiagnosticsContainerRequest1UUID     = "zzzzz-xvhdp-diagnostics0001"
+       CompletedDiagnosticsContainerRequest2UUID     = "zzzzz-xvhdp-diagnostics0002"
+       CompletedDiagnosticsContainer1UUID            = "zzzzz-dz642-diagcompreq0001"
+       CompletedDiagnosticsContainer2UUID            = "zzzzz-dz642-diagcompreq0002"
+       DiagnosticsContainerRequest1LogCollectionUUID = "zzzzz-4zz18-diagcompreqlog1"
+       DiagnosticsContainerRequest2LogCollectionUUID = "zzzzz-4zz18-diagcompreqlog2"
+
+       CompletedDiagnosticsHasher1ContainerRequestUUID = "zzzzz-xvhdp-diag1hasher0001"
+       CompletedDiagnosticsHasher2ContainerRequestUUID = "zzzzz-xvhdp-diag1hasher0002"
+       CompletedDiagnosticsHasher3ContainerRequestUUID = "zzzzz-xvhdp-diag1hasher0003"
+       CompletedDiagnosticsHasher1ContainerUUID        = "zzzzz-dz642-diagcomphasher1"
+       CompletedDiagnosticsHasher2ContainerUUID        = "zzzzz-dz642-diagcomphasher2"
+       CompletedDiagnosticsHasher3ContainerUUID        = "zzzzz-dz642-diagcomphasher3"
+
+       Hasher1LogCollectionUUID = "zzzzz-4zz18-dlogcollhash001"
+       Hasher2LogCollectionUUID = "zzzzz-4zz18-dlogcollhash002"
+       Hasher3LogCollectionUUID = "zzzzz-4zz18-dlogcollhash003"
 
        ArvadosRepoUUID = "zzzzz-s0uqq-arvadosrepo0123"
        ArvadosRepoName = "arvados"
@@ -73,6 +93,9 @@ const (
        TestVMUUID = "zzzzz-2x53u-382brsig8rp3064"
 
        CollectionWithUniqueWordsUUID = "zzzzz-4zz18-mnt690klmb51aud"
+
+       LogCollectionUUID  = "zzzzz-4zz18-logcollection01"
+       LogCollectionUUID2 = "zzzzz-4zz18-logcollection02"
 )
 
 // PathologicalManifest : A valid manifest designed to test
diff --git a/services/api/test/fixtures/collections.yml b/services/api/test/fixtures/collections.yml
index a16ee8763f3f32016e76af30f74da1fda86be186..767f035b88cb824c47de95ef9571c5531c228c23 100644 (file)
@@ -1031,6 +1031,90 @@ collection_with_uri_prop:
   properties:
     "http://schema.org/example": "value1"
 
+log_collection:
+  uuid: zzzzz-4zz18-logcollection01
+  current_version_uuid: zzzzz-4zz18-logcollection01
+  portable_data_hash: 680c855fd6cf2c78778b3728b268925a+475
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  created_at: 2020-10-29T00:51:44.075594000Z
+  modified_by_client_uuid: zzzzz-ozdt8-brczlopd8u8d0jr
+  modified_by_user_uuid: zzzzz-tpzed-d9tiejq69daie8f
+  modified_at: 2020-10-29T00:51:44.072109000Z
+  manifest_text: ". 8c12f5f5297b7337598170c6f531fcee+7882 0:0:arv-mount.txt 0:1910:container.json 1910:1264:crunch-run.txt 3174:1005:crunchstat.txt 4179:659:hoststat.txt 4838:2811:node-info.txt 7649:233:node.json 0:0:stderr.txt\n./log\\040for\\040container\\040ce8i5-dz642-h4kd64itncdcz8l 8c12f5f5297b7337598170c6f531fcee+7882 0:0:arv-mount.txt 0:1910:container.json 1910:1264:crunch-run.txt 3174:1005:crunchstat.txt 4179:659:hoststat.txt 4838:2811:node-info.txt 7649:233:node.json 0:0:stderr.txt\n"
+  name: a real log collection for a completed container
+
+log_collection2:
+  uuid: zzzzz-4zz18-logcollection02
+  current_version_uuid: zzzzz-4zz18-logcollection02
+  portable_data_hash: 680c855fd6cf2c78778b3728b268925a+475
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  created_at: 2020-10-29T00:51:44.075594000Z
+  modified_by_client_uuid: zzzzz-ozdt8-brczlopd8u8d0jr
+  modified_by_user_uuid: zzzzz-tpzed-d9tiejq69daie8f
+  modified_at: 2020-10-29T00:51:44.072109000Z
+  manifest_text: ". 8c12f5f5297b7337598170c6f531fcee+7882 0:0:arv-mount.txt 0:1910:container.json 1910:1264:crunch-run.txt 3174:1005:crunchstat.txt 4179:659:hoststat.txt 4838:2811:node-info.txt 7649:233:node.json 0:0:stderr.txt\n./log\\040for\\040container\\040ce8i5-dz642-h4kd64itncdcz8l 8c12f5f5297b7337598170c6f531fcee+7882 0:0:arv-mount.txt 0:1910:container.json 1910:1264:crunch-run.txt 3174:1005:crunchstat.txt 4179:659:hoststat.txt 4838:2811:node-info.txt 7649:233:node.json 0:0:stderr.txt\n"
+  name: another real log collection for a completed container
+
+diagnostics_request_container_log_collection:
+  uuid: zzzzz-4zz18-diagcompreqlog1
+  current_version_uuid: zzzzz-4zz18-diagcompreqlog1
+  portable_data_hash: 680c855fd6cf2c78778b3728b268925a+475
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  created_at: 2020-11-02T00:20:44.007557000Z
+  modified_by_client_uuid: zzzzz-ozdt8-brczlopd8u8d0jr
+  modified_by_user_uuid: zzzzz-tpzed-d9tiejq69daie8f
+  modified_at: 2020-11-02T00:20:44.005381000Z
+  manifest_text: ". 8c12f5f5297b7337598170c6f531fcee+7882 0:0:arv-mount.txt 0:1910:container.json 1910:1264:crunch-run.txt 3174:1005:crunchstat.txt 4179:659:hoststat.txt 4838:2811:node-info.txt 7649:233:node.json 0:0:stderr.txt\n./log\\040for\\040container\\040ce8i5-dz642-h4kd64itncdcz8l 8c12f5f5297b7337598170c6f531fcee+7882 0:0:arv-mount.txt 0:1910:container.json 1910:1264:crunch-run.txt 3174:1005:crunchstat.txt 4179:659:hoststat.txt 4838:2811:node-info.txt 7649:233:node.json 0:0:stderr.txt\n"
+  name: Container log for request zzzzz-xvhdp-diagnostics0001
+
+hasher1_log_collection:
+  uuid: zzzzz-4zz18-dlogcollhash001
+  current_version_uuid: zzzzz-4zz18-dlogcollhash001
+  portable_data_hash: 680c855fd6cf2c78778b3728b268925a+475
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  created_at: 2020-11-02T00:16:55.272606000Z
+  modified_by_client_uuid: zzzzz-ozdt8-brczlopd8u8d0jr
+  modified_by_user_uuid: zzzzz-tpzed-d9tiejq69daie8f
+  modified_at: 2020-11-02T00:16:55.267006000Z
+  manifest_text: ". 8c12f5f5297b7337598170c6f531fcee+7882 0:0:arv-mount.txt 0:1910:container.json 1910:1264:crunch-run.txt 3174:1005:crunchstat.txt 4179:659:hoststat.txt 4838:2811:node-info.txt 7649:233:node.json 0:0:stderr.txt\n./log\\040for\\040container\\040ce8i5-dz642-h4kd64itncdcz8l 8c12f5f5297b7337598170c6f531fcee+7882 0:0:arv-mount.txt 0:1910:container.json 1910:1264:crunch-run.txt 3174:1005:crunchstat.txt 4179:659:hoststat.txt 4838:2811:node-info.txt 7649:233:node.json 0:0:stderr.txt\n"
+  name: hasher1 log collection
+
+hasher2_log_collection:
+  uuid: zzzzz-4zz18-dlogcollhash002
+  current_version_uuid: zzzzz-4zz18-dlogcollhash002
+  portable_data_hash: 680c855fd6cf2c78778b3728b268925a+475
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  created_at: 2020-11-02T00:20:23.547251000Z
+  modified_by_client_uuid: zzzzz-ozdt8-brczlopd8u8d0jr
+  modified_by_user_uuid: zzzzz-tpzed-d9tiejq69daie8f
+  modified_at: 2020-11-02T00:20:23.545275000Z
+  manifest_text: ". 8c12f5f5297b7337598170c6f531fcee+7882 0:0:arv-mount.txt 0:1910:container.json 1910:1264:crunch-run.txt 3174:1005:crunchstat.txt 4179:659:hoststat.txt 4838:2811:node-info.txt 7649:233:node.json 0:0:stderr.txt\n./log\\040for\\040container\\040ce8i5-dz642-h4kd64itncdcz8l 8c12f5f5297b7337598170c6f531fcee+7882 0:0:arv-mount.txt 0:1910:container.json 1910:1264:crunch-run.txt 3174:1005:crunchstat.txt 4179:659:hoststat.txt 4838:2811:node-info.txt 7649:233:node.json 0:0:stderr.txt\n"
+  name: hasher2 log collection
+
+hasher3_log_collection:
+  uuid: zzzzz-4zz18-dlogcollhash003
+  current_version_uuid: zzzzz-4zz18-dlogcollhash003
+  portable_data_hash: 680c855fd6cf2c78778b3728b268925a+475
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  created_at: 2020-11-02T00:20:38.789204000Z
+  modified_by_client_uuid: zzzzz-ozdt8-brczlopd8u8d0jr
+  modified_by_user_uuid: zzzzz-tpzed-d9tiejq69daie8f
+  modified_at: 2020-11-02T00:20:38.787329000Z
+  manifest_text: ". 8c12f5f5297b7337598170c6f531fcee+7882 0:0:arv-mount.txt 0:1910:container.json 1910:1264:crunch-run.txt 3174:1005:crunchstat.txt 4179:659:hoststat.txt 4838:2811:node-info.txt 7649:233:node.json 0:0:stderr.txt\n./log\\040for\\040container\\040ce8i5-dz642-h4kd64itncdcz8l 8c12f5f5297b7337598170c6f531fcee+7882 0:0:arv-mount.txt 0:1910:container.json 1910:1264:crunch-run.txt 3174:1005:crunchstat.txt 4179:659:hoststat.txt 4838:2811:node-info.txt 7649:233:node.json 0:0:stderr.txt\n"
+  name: hasher3 log collection
+
+diagnostics_request_container_log_collection2:
+  uuid: zzzzz-4zz18-diagcompreqlog2
+  current_version_uuid: zzzzz-4zz18-diagcompreqlog2
+  portable_data_hash: 680c855fd6cf2c78778b3728b268925a+475
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  created_at: 2020-11-03T16:17:53.351593000Z
+  modified_by_client_uuid: zzzzz-ozdt8-brczlopd8u8d0jr
+  modified_by_user_uuid: zzzzz-tpzed-d9tiejq69daie8f
+  modified_at: 2020-11-03T16:17:53.346969000Z
+  manifest_text: ". 8c12f5f5297b7337598170c6f531fcee+7882 0:0:arv-mount.txt 0:1910:container.json 1910:1264:crunch-run.txt 3174:1005:crunchstat.txt 4179:659:hoststat.txt 4838:2811:node-info.txt 7649:233:node.json 0:0:stderr.txt\n./log\\040for\\040container\\040ce8i5-dz642-h4kd64itncdcz8l 8c12f5f5297b7337598170c6f531fcee+7882 0:0:arv-mount.txt 0:1910:container.json 1910:1264:crunch-run.txt 3174:1005:crunchstat.txt 4179:659:hoststat.txt 4838:2811:node-info.txt 7649:233:node.json 0:0:stderr.txt\n"
+  name: Container log for request zzzzz-xvhdp-diagnostics0002
+
 # Test Helper trims the rest of the file
 
 # Do not add your fixtures below this line as the rest of this file will be trimmed by test_helper
diff --git a/services/api/test/fixtures/container_requests.yml b/services/api/test/fixtures/container_requests.yml
index ea86dca1784834d7ca0c37838c743aa785812a7b..ab0400a67854c47b3967fb84aca44265e5f7f227 100644 (file)
@@ -94,7 +94,7 @@ completed:
   output_path: test
   command: ["echo", "hello"]
   container_uuid: zzzzz-dz642-compltcontainer
-  log_uuid: zzzzz-4zz18-y9vne9npefyxh8g
+  log_uuid: zzzzz-4zz18-logcollection01
   output_uuid: zzzzz-4zz18-znfnqtbbv4spc3w
   runtime_constraints:
     vcpus: 1
@@ -115,10 +115,238 @@ completed-older:
   output_path: test
   command: ["arvados-cwl-runner", "echo", "hello"]
   container_uuid: zzzzz-dz642-compltcontainr2
+  log_uuid: zzzzz-4zz18-logcollection02
+  output_uuid: zzzzz-4zz18-znfnqtbbv4spc3w
   runtime_constraints:
     vcpus: 1
     ram: 123
 
+completed_diagnostics:
+  name: CWL diagnostics hasher
+  uuid: zzzzz-xvhdp-diagnostics0001
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  state: Final
+  priority: 1
+  created_at: 2020-11-02T00:03:50.229364000Z
+  modified_at: 2020-11-02T00:20:44.041122000Z
+  modified_by_user_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  container_image: d967ef4a1ca90a096a39f5ce68e4a2e7+261
+  cwd: /var/spool/cwl
+  output_path: /var/spool/cwl
+  command: [
+             "arvados-cwl-runner",
+             "--local",
+             "--api=containers",
+             "--no-log-timestamps",
+             "--disable-validate",
+             "--disable-color",
+             "--eval-timeout=20",
+             "--thread-count=1",
+             "--disable-reuse",
+             "--collection-cache-size=256",
+             "--on-error=continue",
+             "/var/lib/cwl/workflow.json#main",
+             "/var/lib/cwl/cwl.input.json"
+           ]
+  container_uuid: zzzzz-dz642-diagcompreq0001
+  log_uuid: zzzzz-4zz18-diagcompreqlog1
+  output_uuid: zzzzz-4zz18-znfnqtbbv4spc3w
+  runtime_constraints:
+    vcpus: 1
+    ram: 1342177280
+    API: true
+
+completed_diagnostics_hasher1:
+  name: hasher1
+  uuid: zzzzz-xvhdp-diag1hasher0001
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  state: Final
+  priority: 500
+  created_at: 2020-11-02T00:03:50.229364000Z
+  modified_at: 2020-11-02T00:20:44.041122000Z
+  modified_by_user_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  container_image: d967ef4a1ca90a096a39f5ce68e4a2e7+261
+  cwd: /var/spool/cwl
+  output_name: Output for step hasher1
+  output_path: /var/spool/cwl
+  command: [
+             "md5sum",
+             "/keep/9f26a86b6030a69ad222cf67d71c9502+65/hasher-input-file.txt"
+           ]
+  container_uuid: zzzzz-dz642-diagcomphasher1
+  requesting_container_uuid: zzzzz-dz642-diagcompreq0001
+  log_uuid: zzzzz-4zz18-dlogcollhash001
+  output_uuid: zzzzz-4zz18-znfnqtbbv4spc3w
+  runtime_constraints:
+    vcpus: 1
+    ram: 2684354560
+    API: true
+
+completed_diagnostics_hasher2:
+  name: hasher2
+  uuid: zzzzz-xvhdp-diag1hasher0002
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  state: Final
+  priority: 500
+  created_at: 2020-11-02T00:17:07.067464000Z
+  modified_at: 2020-11-02T00:20:23.557498000Z
+  modified_by_user_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  container_image: d967ef4a1ca90a096a39f5ce68e4a2e7+261
+  cwd: /var/spool/cwl
+  output_name: Output for step hasher2
+  output_path: /var/spool/cwl
+  command: [
+             "md5sum",
+             "/keep/d3a687732e84061f3bae15dc7e313483+62/hasher1.md5sum.txt"
+           ]
+  container_uuid: zzzzz-dz642-diagcomphasher2
+  requesting_container_uuid: zzzzz-dz642-diagcompreq0001
+  log_uuid: zzzzz-4zz18-dlogcollhash002
+  output_uuid: zzzzz-4zz18-znfnqtbbv4spc3w
+  runtime_constraints:
+    vcpus: 2
+    ram: 2684354560
+    API: true
+
+completed_diagnostics_hasher3:
+  name: hasher3
+  uuid: zzzzz-xvhdp-diag1hasher0003
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  state: Final
+  priority: 500
+  created_at: 2020-11-02T00:20:30.960251000Z
+  modified_at: 2020-11-02T00:20:38.799377000Z
+  modified_by_user_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  container_image: d967ef4a1ca90a096a39f5ce68e4a2e7+261
+  cwd: /var/spool/cwl
+  output_name: Output for step hasher3
+  output_path: /var/spool/cwl
+  command: [
+             "md5sum",
+             "/keep/6bd770f6cf8f83e7647c602eecfaeeb8+62/hasher2.md5sum.txt"
+           ]
+  container_uuid: zzzzz-dz642-diagcomphasher3
+  requesting_container_uuid: zzzzz-dz642-diagcompreq0001
+  log_uuid: zzzzz-4zz18-dlogcollhash003
+  output_uuid: zzzzz-4zz18-znfnqtbbv4spc3w
+  runtime_constraints:
+    vcpus: 1
+    ram: 2684354560
+    API: true
+
+completed_diagnostics2:
+  name: Copy of CWL diagnostics hasher
+  uuid: zzzzz-xvhdp-diagnostics0002
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  state: Final
+  priority: 1
+  created_at: 2020-11-03T15:54:30.098485000Z
+  modified_at: 2020-11-03T16:17:53.406809000Z
+  modified_by_user_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  container_image: d967ef4a1ca90a096a39f5ce68e4a2e7+261
+  cwd: /var/spool/cwl
+  output_path: /var/spool/cwl
+  command: [
+             "arvados-cwl-runner",
+             "--local",
+             "--api=containers",
+             "--no-log-timestamps",
+             "--disable-validate",
+             "--disable-color",
+             "--eval-timeout=20",
+             "--thread-count=1",
+             "--disable-reuse",
+             "--collection-cache-size=256",
+             "--on-error=continue",
+             "/var/lib/cwl/workflow.json#main",
+             "/var/lib/cwl/cwl.input.json"
+           ]
+  container_uuid: zzzzz-dz642-diagcompreq0002
+  log_uuid: zzzzz-4zz18-diagcompreqlog2
+  output_uuid: zzzzz-4zz18-znfnqtbbv4spc3w
+  runtime_constraints:
+    vcpus: 1
+    ram: 1342177280
+    API: true
+
+completed_diagnostics_hasher1_reuse:
+  name: hasher1
+  uuid: zzzzz-xvhdp-diag2hasher0001
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  state: Final
+  priority: 500
+  created_at: 2020-11-02T00:03:50.229364000Z
+  modified_at: 2020-11-02T00:20:44.041122000Z
+  modified_by_user_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  container_image: d967ef4a1ca90a096a39f5ce68e4a2e7+261
+  cwd: /var/spool/cwl
+  output_name: Output for step hasher1
+  output_path: /var/spool/cwl
+  command: [
+             "md5sum",
+             "/keep/9f26a86b6030a69ad222cf67d71c9502+65/hasher-input-file.txt"
+           ]
+  container_uuid: zzzzz-dz642-diagcomphasher1
+  requesting_container_uuid: zzzzz-dz642-diagcompreq0002
+  log_uuid: zzzzz-4zz18-dlogcollhash001
+  output_uuid: zzzzz-4zz18-znfnqtbbv4spc3w
+  runtime_constraints:
+    vcpus: 1
+    ram: 2684354560
+    API: true
+
+completed_diagnostics_hasher2_reuse:
+  name: hasher2
+  uuid: zzzzz-xvhdp-diag2hasher0002
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  state: Final
+  priority: 500
+  created_at: 2020-11-02T00:17:07.067464000Z
+  modified_at: 2020-11-02T00:20:23.557498000Z
+  modified_by_user_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  container_image: d967ef4a1ca90a096a39f5ce68e4a2e7+261
+  cwd: /var/spool/cwl
+  output_name: Output for step hasher2
+  output_path: /var/spool/cwl
+  command: [
+             "md5sum",
+             "/keep/d3a687732e84061f3bae15dc7e313483+62/hasher1.md5sum.txt"
+           ]
+  container_uuid: zzzzz-dz642-diagcomphasher2
+  requesting_container_uuid: zzzzz-dz642-diagcompreq0002
+  log_uuid: zzzzz-4zz18-dlogcollhash002
+  output_uuid: zzzzz-4zz18-znfnqtbbv4spc3w
+  runtime_constraints:
+    vcpus: 2
+    ram: 2684354560
+    API: true
+
+completed_diagnostics_hasher3_reuse:
+  name: hasher3
+  uuid: zzzzz-xvhdp-diag2hasher0003
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  state: Final
+  priority: 500
+  created_at: 2020-11-02T00:20:30.960251000Z
+  modified_at: 2020-11-02T00:20:38.799377000Z
+  modified_by_user_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  container_image: d967ef4a1ca90a096a39f5ce68e4a2e7+261
+  cwd: /var/spool/cwl
+  output_name: Output for step hasher3
+  output_path: /var/spool/cwl
+  command: [
+             "md5sum",
+             "/keep/6bd770f6cf8f83e7647c602eecfaeeb8+62/hasher2.md5sum.txt"
+           ]
+  container_uuid: zzzzz-dz642-diagcomphasher3
+  requesting_container_uuid: zzzzz-dz642-diagcompreq0002
+  log_uuid: zzzzz-4zz18-dlogcollhash003
+  output_uuid: zzzzz-4zz18-znfnqtbbv4spc3w
+  runtime_constraints:
+    vcpus: 1
+    ram: 2684354560
+    API: true
+
 requester:
   uuid: zzzzz-xvhdp-9zacv3o1xw6sxz5
   owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
@@ -309,7 +537,7 @@ completed_with_input_mounts:
     vcpus: 1
     ram: 123
   container_uuid: zzzzz-dz642-compltcontainer
-  log_uuid: zzzzz-4zz18-y9vne9npefyxh8g
+  log_uuid: zzzzz-4zz18-logcollection01
   output_uuid: zzzzz-4zz18-znfnqtbbv4spc3w
   mounts:
     /var/lib/cwl/cwl.input.json:
@@ -758,7 +986,7 @@ cr_in_trashed_project:
   output_path: test
   command: ["echo", "hello"]
   container_uuid: zzzzz-dz642-compltcontainer
-  log_uuid: zzzzz-4zz18-y9vne9npefyxh8g
+  log_uuid: zzzzz-4zz18-logcollection01
   output_uuid: zzzzz-4zz18-znfnqtbbv4spc3w
   runtime_constraints:
     vcpus: 1
diff --git a/services/api/test/fixtures/containers.yml b/services/api/test/fixtures/containers.yml
index f18adb5dbd7d1ad56c2575984a668240e21479a6..b7d082771a0b37f2c3760bae23c46591805e07ef 100644 (file)
@@ -126,6 +126,153 @@ completed_older:
   secret_mounts: {}
   secret_mounts_md5: 99914b932bd37a50b983c5e7c90ae93b
 
+diagnostics_completed_requester:
+  uuid: zzzzz-dz642-diagcompreq0001
+  owner_uuid: zzzzz-tpzed-000000000000000
+  state: Complete
+  exit_code: 0
+  priority: 562948349145881771
+  created_at: 2020-11-02T00:03:50.192697000Z
+  modified_at: 2020-11-02T00:20:43.987275000Z
+  started_at: 2020-11-02T00:08:07.186711000Z
+  finished_at: 2020-11-02T00:20:43.975416000Z
+  container_image: d967ef4a1ca90a096a39f5ce68e4a2e7+261
+  cwd: /var/spool/cwl
+  log: 6129e376cb05c942f75a0c36083383e8+244
+  output: 1f4b0bc7583c2a7f9102c395f4ffc5e3+45
+  output_path: /var/spool/cwl
+  command: [
+             "arvados-cwl-runner",
+             "--local",
+             "--api=containers",
+             "--no-log-timestamps",
+             "--disable-validate",
+             "--disable-color",
+             "--eval-timeout=20",
+             "--thread-count=1",
+             "--disable-reuse",
+             "--collection-cache-size=256",
+             "--on-error=continue",
+             "/var/lib/cwl/workflow.json#main",
+             "/var/lib/cwl/cwl.input.json"
+           ]
+  runtime_constraints:
+    API: true
+    keep_cache_ram: 268435456
+    ram: 1342177280
+    vcpus: 1
+
+diagnostics_completed_hasher1:
+  uuid: zzzzz-dz642-diagcomphasher1
+  owner_uuid: zzzzz-tpzed-000000000000000
+  state: Complete
+  exit_code: 0
+  priority: 562948349145881771
+  created_at: 2020-11-02T00:08:18.829222000Z
+  modified_at: 2020-11-02T00:16:55.142023000Z
+  started_at: 2020-11-02T00:16:52.375871000Z
+  finished_at: 2020-11-02T00:16:55.105985000Z
+  container_image: d967ef4a1ca90a096a39f5ce68e4a2e7+261
+  cwd: /var/spool/cwl
+  log: fed8fb19fe8e3a320c29fed0edab12dd+220
+  output: d3a687732e84061f3bae15dc7e313483+62
+  output_path: /var/spool/cwl
+  command: [
+             "md5sum",
+             "/keep/9f26a86b6030a69ad222cf67d71c9502+65/hasher-input-file.txt"
+           ]
+  runtime_constraints:
+    API: true
+    keep_cache_ram: 268435456
+    ram: 268435456
+    vcpus: 1
+
+diagnostics_completed_hasher2:
+  uuid: zzzzz-dz642-diagcomphasher2
+  owner_uuid: zzzzz-tpzed-000000000000000
+  state: Complete
+  exit_code: 0
+  priority: 562948349145881771
+  created_at: 2020-11-02T00:17:07.026493000Z
+  modified_at: 2020-11-02T00:20:23.505908000Z
+  started_at: 2020-11-02T00:20:21.513185000Z
+  finished_at: 2020-11-02T00:20:23.478317000Z
+  container_image: d967ef4a1ca90a096a39f5ce68e4a2e7+261
+  cwd: /var/spool/cwl
+  log: 4fc03b95fc2646b0dec7383dbb7d56d8+221
+  output: 6bd770f6cf8f83e7647c602eecfaeeb8+62
+  output_path: /var/spool/cwl
+  command: [
+             "md5sum",
+             "/keep/d3a687732e84061f3bae15dc7e313483+62/hasher1.md5sum.txt"
+           ]
+  runtime_constraints:
+    API: true
+    keep_cache_ram: 268435456
+    ram: 268435456
+    vcpus: 2
+
+diagnostics_completed_hasher3:
+  uuid: zzzzz-dz642-diagcomphasher3
+  owner_uuid: zzzzz-tpzed-000000000000000
+  state: Complete
+  exit_code: 0
+  priority: 562948349145881771
+  created_at: 2020-11-02T00:20:30.943856000Z
+  modified_at: 2020-11-02T00:20:38.746541000Z
+  started_at: 2020-11-02T00:20:36.748957000Z
+  finished_at: 2020-11-02T00:20:38.732199000Z
+  container_image: d967ef4a1ca90a096a39f5ce68e4a2e7+261
+  cwd: /var/spool/cwl
+  log: 1eeaf70de0f65b1346e54c59f09e848d+210
+  output: 11b5fdaa380102e760c3eb6de80a9876+62
+  output_path: /var/spool/cwl
+  command: [
+             "md5sum",
+             "/keep/6bd770f6cf8f83e7647c602eecfaeeb8+62/hasher2.md5sum.txt"
+           ]
+  runtime_constraints:
+    API: true
+    keep_cache_ram: 268435456
+    ram: 268435456
+    vcpus: 1
+
+diagnostics_completed_requester2:
+  uuid: zzzzz-dz642-diagcompreq0002
+  owner_uuid: zzzzz-tpzed-000000000000000
+  state: Complete
+  exit_code: 0
+  priority: 1124295487972526
+  created_at: 2020-11-03T15:54:36.504661000Z
+  modified_at: 2020-11-03T16:17:53.242868000Z
+  started_at: 2020-11-03T16:09:51.123659000Z
+  finished_at: 2020-11-03T16:17:53.220358000Z
+  container_image: d967ef4a1ca90a096a39f5ce68e4a2e7+261
+  cwd: /var/spool/cwl
+  log: f1933bf5191f576613ea7f65bd0ead53+244
+  output: 941b71a57208741ce8742eca62352fb1+123
+  output_path: /var/spool/cwl
+  command: [
+             "arvados-cwl-runner",
+             "--local",
+             "--api=containers",
+             "--no-log-timestamps",
+             "--disable-validate",
+             "--disable-color",
+             "--eval-timeout=20",
+             "--thread-count=1",
+             "--disable-reuse",
+             "--collection-cache-size=256",
+             "--on-error=continue",
+             "/var/lib/cwl/workflow.json#main",
+             "/var/lib/cwl/cwl.input.json"
+           ]
+  runtime_constraints:
+    API: true
+    keep_cache_ram: 268435456
+    ram: 1342177280
+    vcpus: 1
+
 requester:
   uuid: zzzzz-dz642-requestingcntnr
   owner_uuid: zzzzz-tpzed-000000000000000