+// Copyright (C) The Lightning Authors. All rights reserved.
+//
+// SPDX-License-Identifier: AGPL-3.0
+
package lightning
import (
"net/http"
_ "net/http/pprof"
"os"
- "path"
"sort"
"strconv"
"strings"
+ "sync"
+ "sync/atomic"
"git.arvados.org/arvados.git/sdk/go/arvados"
+ "github.com/arvados/lightning/hgvs"
"github.com/kshedden/gonpy"
"github.com/sirupsen/logrus"
log "github.com/sirupsen/logrus"
runlocal := flags.Bool("local", false, "run on local host (default: run in an arvados container)")
projectUUID := flags.String("project", "", "project `UUID` for output data")
priority := flags.Int("priority", 500, "container request priority")
- inputFilename := flags.String("i", "-", "input `file`")
- outputFilename := flags.String("o", "-", "output `file`")
+ inputDir := flags.String("input-dir", "./in", "input `directory`")
+ outputDir := flags.String("output-dir", "./out", "output `directory`")
annotationsFilename := flags.String("output-annotations", "", "output `file` for tile variant annotations csv")
librefsFilename := flags.String("output-onehot2tilevar", "", "when using -one-hot, create csv `file` mapping column# to tag# and variant#")
labelsFilename := flags.String("output-labels", "", "output `file` for genome labels csv")
regionsFilename := flags.String("regions", "", "only output columns/annotations that intersect regions in specified bed `file`")
+ expandRegions := flags.Int("expand-regions", 0, "expand specified regions by `N` base pairs on each side")
onehot := flags.Bool("one-hot", false, "recode tile variants as one-hot")
chunks := flags.Int("chunks", 1, "split output into `N` numpy files")
cmd.filter.Flags(flags)
return 0
} else if err != nil {
return 2
+ } else if flags.NArg() > 0 {
+ err = fmt.Errorf("errant command line arguments after parsed flags: %v", flags.Args())
+ return 2
}
if *pprof != "" {
}
if !*runlocal {
- if *outputFilename != "-" {
- err = errors.New("cannot specify output file in container mode: not implemented")
- return 1
- }
runner := arvadosContainerRunner{
Name: "lightning export-numpy",
Client: arvados.NewClientFromEnv(),
ProjectUUID: *projectUUID,
- RAM: 450000000000,
- VCPUs: 32,
+ RAM: 500000000000,
+ VCPUs: 96,
Priority: *priority,
KeepCache: 1,
APIAccess: true,
}
- err = runner.TranslatePaths(inputFilename, regionsFilename)
+ err = runner.TranslatePaths(inputDir, regionsFilename)
if err != nil {
return 1
}
runner.Args = []string{"export-numpy", "-local=true",
+ "-pprof", ":6060",
fmt.Sprintf("-one-hot=%v", *onehot),
- "-i", *inputFilename,
- "-o", "/mnt/output/matrix.npy",
+ "-input-dir", *inputDir,
+ "-output-dir", "/mnt/output",
"-output-annotations", "/mnt/output/annotations.csv",
"-output-onehot2tilevar", "/mnt/output/onehot2tilevar.csv",
"-output-labels", "/mnt/output/labels.csv",
"-regions", *regionsFilename,
- "-max-variants", fmt.Sprintf("%d", cmd.filter.MaxVariants),
- "-min-coverage", fmt.Sprintf("%f", cmd.filter.MinCoverage),
- "-max-tag", fmt.Sprintf("%d", cmd.filter.MaxTag),
+ "-expand-regions", fmt.Sprintf("%d", *expandRegions),
"-chunks", fmt.Sprintf("%d", *chunks),
}
+ runner.Args = append(runner.Args, cmd.filter.Args()...)
var output string
output, err = runner.Run()
if err != nil {
return 0
}
- var input io.ReadCloser
- if *inputFilename == "-" {
- input = ioutil.NopCloser(stdin)
- } else {
- input, err = open(*inputFilename)
- if err != nil {
- return 1
- }
- defer input.Close()
- }
- input = ioutil.NopCloser(bufio.NewReaderSize(input, 8*1024*1024))
tilelib := &tileLibrary{
retainNoCalls: true,
retainTileSequences: true,
compactGenomes: map[string][]tileVariantID{},
}
- err = tilelib.LoadGob(context.Background(), input, strings.HasSuffix(*inputFilename, ".gz"), nil)
- if err != nil {
- return 1
- }
- err = input.Close()
+ err = tilelib.LoadDir(context.Background(), *inputDir)
if err != nil {
return 1
}
return 1
}
defer f.Close()
- _, outBasename := path.Split(*outputFilename)
+ outBasename := "matrix.npy"
for i, name := range names {
_, err = fmt.Fprintf(f, "%d,%q,%q\n", i, trimFilenameForLabel(name), outBasename)
if err != nil {
}
log.Info("determining which tiles intersect given regions")
- dropTiles, err := chooseTiles(tilelib, *regionsFilename)
+ dropTiles, err := chooseTiles(tilelib, *regionsFilename, *expandRegions)
if err != nil {
return 1
}
+ annotation2tvs := map[string]map[hgvs.Variant][]tileLibRef{}
if *annotationsFilename != "" {
log.Info("writing annotations")
var annow io.WriteCloser
return 1
}
defer annow.Close()
- err = (&annotatecmd{maxTileSize: 5000, dropTiles: dropTiles}).exportTileDiffs(annow, tilelib)
+ var mtx sync.Mutex
+ err = (&annotatecmd{
+ maxTileSize: 5000,
+ dropTiles: dropTiles,
+ reportAnnotation: func(tag tagID, _ int, variant tileVariantID, refname string, seqname string, pdi hgvs.Variant) {
+ mtx.Lock()
+ defer mtx.Unlock()
+ if annotation2tvs[seqname] == nil {
+ annotation2tvs[seqname] = map[hgvs.Variant][]tileLibRef{}
+ }
+ annotation2tvs[seqname][pdi] = append(annotation2tvs[seqname][pdi], tileLibRef{Tag: tag, Variant: variant})
+ },
+ }).exportTileDiffs(annow, tilelib)
if err != nil {
return 1
}
}
}
+ var lastErr atomic.Value
+ var wg sync.WaitGroup
+ for seqname, pdivars := range annotation2tvs {
+ seqname, pdivars := seqname, pdivars
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ log.Infof("choosing hgvs columns for seq %s", seqname)
+ var pdis []hgvs.Variant
+ for pdi, librefs := range pdivars {
+ // Include this HGVS column if it was
+ // seen in a variant of any
+ // non-dropped tile.
+ for _, libref := range librefs {
+ if int(libref.Tag) >= len(dropTiles) || !dropTiles[libref.Tag] {
+ pdis = append(pdis, pdi)
+ break
+ }
+ }
+ }
+ sort.Slice(pdis, func(i, j int) bool {
+ if cmp := pdis[i].Position - pdis[j].Position; cmp != 0 {
+ return cmp < 0
+ } else if pdis[i].Ref != pdis[j].Ref {
+ return pdis[i].Ref < pdis[j].Ref
+ } else {
+ return pdis[i].New < pdis[j].New
+ }
+ })
+ log.Infof("writing column labels for seq %s", seqname)
+ var buf bytes.Buffer
+ for _, pdi := range pdis {
+ fmt.Fprintf(&buf, "%s:g.%s\n", seqname, pdi.String())
+ }
+ err := ioutil.WriteFile(*outputDir+"/"+seqname+".columns.csv", buf.Bytes(), 0777)
+ if err != nil {
+ lastErr.Store(err)
+ return
+ }
+
+ log.Infof("building hgvs matrix for seq %s", seqname)
+ data := make([]int8, len(names)*len(pdis)*2)
+ for row, name := range names {
+ cg := tilelib.compactGenomes[name]
+ rowstart := row * len(pdis) * 2
+ for col, pdi := range pdis {
+ for _, libref := range pdivars[pdi] {
+ if len(cg) <= int(libref.Tag)*2+1 {
+ continue
+ }
+ for phase := 0; phase < 2; phase++ {
+ if cg[int(libref.Tag)*2+phase] == libref.Variant {
+ data[rowstart+col*2+phase] = 1
+ }
+ }
+ }
+ }
+ }
+ log.Infof("writing hgvs numpy for seq %s", seqname)
+ f, err := os.OpenFile(*outputDir+"/"+seqname+".npy", os.O_CREATE|os.O_WRONLY, 0777)
+ if err != nil {
+ lastErr.Store(err)
+ return
+ }
+ defer f.Close()
+ // gonpy closes our writer and ignores errors. Give it a nopCloser so we can close f properly.
+ npw, err := gonpy.NewWriter(nopCloser{f})
+ if err != nil {
+ lastErr.Store(err)
+ return
+ }
+ npw.Shape = []int{len(names), len(pdis) * 2}
+ err = npw.WriteInt8(data)
+ if err != nil {
+ lastErr.Store(err)
+ return
+ }
+ err = f.Close()
+ if err != nil {
+ lastErr.Store(err)
+ return
+ }
+ }()
+ }
+ wg.Wait()
+ if e, ok := lastErr.Load().(error); ok {
+ err = e
+ return 1
+ }
+
chunksize := (len(tilelib.variant) + *chunks - 1) / *chunks
for chunk := 0; chunk < *chunks; chunk++ {
log.Infof("preparing chunk %d of %d", chunk+1, *chunks)
var npw *gonpy.NpyWriter
var output io.WriteCloser
- fnm := *outputFilename
- if fnm == "-" {
- output = nopCloser{stdout}
- } else {
- if *chunks > 1 {
- if strings.HasSuffix(fnm, ".npy") {
- fnm = fmt.Sprintf("%s.%d.npy", fnm[:len(fnm)-4], chunk)
- } else {
- fnm = fmt.Sprintf("%s.%d", fnm, chunk)
- }
- }
- output, err = os.OpenFile(fnm, os.O_CREATE|os.O_WRONLY, 0777)
- if err != nil {
- return 1
- }
- defer output.Close()
+ fnm := *outputDir + "/matrix.npy"
+ if *chunks > 1 {
+ fnm = fmt.Sprintf("%s/matrix.%d.npy", *outputDir, chunk)
+ }
+ output, err = os.OpenFile(fnm, os.O_CREATE|os.O_WRONLY, 0777)
+ if err != nil {
+ return 1
}
+ defer output.Close()
bufw := bufio.NewWriter(output)
npw, err = gonpy.NewWriter(nopCloser{bufw})
if err != nil {
for tag, variants := range tilelib.variant {
lq := lowqual[tag]
for varidx, hash := range variants {
- if len(tilelib.seq[hash]) == 0 {
+ if len(tilelib.hashSequence(hash)) == 0 {
if lq == nil {
lq = map[tileVariantID]bool{}
lowqual[tag] = lq
return
}
-func chooseTiles(tilelib *tileLibrary, regionsFilename string) (drop []bool, err error) {
- if regionsFilename == "" {
- return
- }
+func makeMask(regionsFilename string, expandRegions int) (*mask, error) {
+ log.Printf("makeMask: reading %s", regionsFilename)
rfile, err := zopen(regionsFilename)
if err != nil {
- return
+ return nil, err
}
defer rfile.Close()
- regions, err := ioutil.ReadAll(rfile)
+ regions, err := io.ReadAll(rfile)
if err != nil {
- return
+ return nil, err
}
- log.Print("chooseTiles: building mask")
- mask := &mask{}
+ log.Print("makeMask: building mask")
+ var mask mask
for _, line := range bytes.Split(regions, []byte{'\n'}) {
if bytes.HasPrefix(line, []byte{'#'}) {
continue
// GFF/GTF
end++
} else {
- err = fmt.Errorf("cannot parse input line as BED or GFF/GTF: %q", line)
- return
+ return nil, fmt.Errorf("cannot parse input line as BED or GFF/GTF: %q", line)
}
}
- mask.Add(refseqname, start, end)
+ mask.Add(refseqname, start-expandRegions, end+expandRegions)
}
- log.Print("chooseTiles: mask.Freeze")
+ log.Print("makeMask: mask.Freeze")
mask.Freeze()
+ return &mask, nil
+}
+
+func chooseTiles(tilelib *tileLibrary, regionsFilename string, expandRegions int) (drop []bool, err error) {
+ if regionsFilename == "" {
+ return
+ }
+ mask, err := makeMask(regionsFilename, expandRegions)
+ if err != nil {
+ return
+ }
tagset := tilelib.taglib.Tags()
if len(tagset) == 0 {
s = strings.TrimSuffix(s, ".2")
s = strings.TrimSuffix(s, ".gz")
s = strings.TrimSuffix(s, ".vcf")
+ s = strings.Replace(s, ",", "-", -1)
return s
}